feat(rnd): Add Node & Graph level execution stats instrumentation (#7957)

### **User description**
### Background

The scope of this change is collecting the information that will be needed for execution analytics.

### Changes 🏗️

* Add sentry integration.
* Refactor logging_metadata on manager.py.
* Collect graph-level & node-level instrumentation.
* Introduced `stats` column for `AgentNodeExecution` & `AgentGraphExecution`.
This commit is contained in:
Zamil Majdy
2024-09-03 14:45:19 -05:00
committed by GitHub
parent 7db85a8990
commit c2a79d2f10
14 changed files with 294 additions and 371 deletions

View File

@@ -23,10 +23,12 @@ from autogpt_server.util import json, mock
class GraphExecution(BaseModel):
    """A queued graph run: the run's id, the node executions that seed it, and the graph's id."""

    graph_exec_id: str
    # Executions of the graph's entry nodes, enqueued when the run starts.
    start_node_execs: list["NodeExecution"]
    graph_id: str
class NodeExecution(BaseModel):
    """One node-execution task: ids locating the node within a graph run, plus its input."""

    graph_exec_id: str
    graph_id: str
    node_exec_id: str
    node_id: str
    data: BlockInput
@@ -243,7 +245,7 @@ async def upsert_execution_input(
async def upsert_execution_output(
node_exec_id: str,
output_name: str,
output_data: Any,
output_data: str, # JSON serialized data.
) -> None:
"""
Insert an AgentNodeExecutionInputOutput record as one of AgentNodeExecution's outputs.
@@ -251,12 +253,26 @@ async def upsert_execution_output(
await AgentNodeExecutionInputOutput.prisma().create(
data={
"name": output_name,
"data": json.dumps(output_data),
"data": output_data,
"referencedByOutputExecId": node_exec_id,
}
)
async def update_graph_execution_stats(graph_exec_id: str, stats: dict[str, Any]) -> None:
    """Persist graph-level execution stats as a JSON string on AgentGraphExecution.stats."""
    await AgentGraphExecution.prisma().update(
        where={"id": graph_exec_id},
        data={"stats": json.dumps(stats)},
    )
async def update_node_execution_stats(node_exec_id: str, stats: dict[str, Any]) -> None:
    """Persist node-level execution stats as a JSON string on AgentNodeExecution.stats."""
    await AgentNodeExecution.prisma().update(
        where={"id": node_exec_id},
        data={"stats": json.dumps(stats)},
    )
async def update_execution_status(
node_exec_id: str, status: ExecutionStatus, execution_data: BlockInput | None = None
) -> ExecutionResult:

View File

@@ -21,11 +21,21 @@ from autogpt_server.data.execution import (
merge_execution_input,
parse_execution_output,
update_execution_status,
update_graph_execution_stats,
update_node_execution_stats,
upsert_execution_input,
upsert_execution_output,
)
from autogpt_server.data.graph import Graph, Link, Node, get_graph, get_node
from autogpt_server.util import json
from autogpt_server.util.decorator import error_logged, time_measured
from autogpt_server.util.logging import configure_logging
from autogpt_server.util.metrics import (
metric_graph_count,
metric_graph_timing,
metric_node_payload,
metric_node_timing,
)
from autogpt_server.util.service import AppService, expose, get_service_client
from autogpt_server.util.settings import Config
from autogpt_server.util.type import convert
@@ -33,11 +43,19 @@ from autogpt_server.util.type import convert
logger = logging.getLogger(__name__)
def get_log_metadata(graph_eid: str, node_eid: str, block_name: str = "-") -> dict:
def get_log_metadata(
    graph_eid: str,
    graph_id: str,
    node_eid: str,
    node_id: str,
    block_name: str,
) -> dict:
    """Build the structured-logging metadata dict shared by executor log lines.

    The keys double as metric tags, so they must stay stable.
    """
    metadata = dict(
        component="ExecutionManager",
        graph_eid=graph_eid,
        graph_id=graph_id,
        node_eid=node_eid,
        node_id=node_id,
        block_name=block_name,
    )
    return metadata
@@ -62,6 +80,7 @@ def execute_node(
The subsequent node to be enqueued, or None if there is no subsequent node.
"""
graph_exec_id = data.graph_exec_id
graph_id = data.graph_id
node_exec_id = data.node_exec_id
node_id = data.node_id
@@ -82,9 +101,15 @@ def execute_node(
return
# Sanity check: validate the execution input.
log_metadata = get_log_metadata(graph_exec_id, node_exec_id, node_block.name)
exec_data, error = validate_exec(node, data.data, resolve_input=False)
if exec_data is None:
log_metadata = get_log_metadata(
graph_eid=graph_exec_id,
graph_id=graph_id,
node_eid=node_exec_id,
node_id=node_id,
block_name=node_block.name,
)
input_data, error = validate_exec(node, data.data, resolve_input=False)
if input_data is None:
logger.error(
"Skip execution, input validation error",
extra={
@@ -94,20 +119,23 @@ def execute_node(
return
# Execute the node
exec_data_str = str(exec_data).encode("utf-8").decode("unicode_escape")
input_data_str = json.dumps(input_data)
metric_node_payload("input_size", len(input_data_str), tags=log_metadata)
logger.info(
"Executed node with input",
extra={"json_fields": {**log_metadata, "input": exec_data_str}},
extra={"json_fields": {**log_metadata, "input": input_data_str}},
)
update_execution(ExecutionStatus.RUNNING)
try:
for output_name, output_data in node_block.execute(exec_data):
for output_name, output_data in node_block.execute(input_data):
output_data_str = json.dumps(output_data)
metric_node_payload("output_size", len(output_data_str), tags=log_metadata)
logger.info(
"Node produced output",
extra={"json_fields": {**log_metadata, output_name: output_data}},
extra={"json_fields": {**log_metadata, output_name: output_data_str}},
)
wait(upsert_execution_output(node_exec_id, output_name, output_data))
wait(upsert_execution_output(node_exec_id, output_name, output_data_str))
for execution in _enqueue_next_nodes(
api_client=api_client,
@@ -115,6 +143,7 @@ def execute_node(
node=node,
output=(output_name, output_data),
graph_exec_id=graph_exec_id,
graph_id=graph_id,
log_metadata=log_metadata,
):
yield execution
@@ -148,6 +177,7 @@ def _enqueue_next_nodes(
node: Node,
output: BlockData,
graph_exec_id: str,
graph_id: str,
log_metadata: dict,
) -> list[NodeExecution]:
def wait(f: Coroutine[Any, Any, T]) -> T:
@@ -162,6 +192,7 @@ def _enqueue_next_nodes(
api_client.send_execution_update(exec_update.model_dump())
return NodeExecution(
graph_exec_id=graph_exec_id,
graph_id=graph_id,
node_exec_id=node_exec_id,
node_id=node_id,
data=data,
@@ -369,8 +400,33 @@ class Executor:
cls.agent_server_client = get_agent_server_client()
@classmethod
@error_logged
def on_node_execution(cls, q: ExecutionQueue[NodeExecution], data: NodeExecution):
log_metadata = get_log_metadata(data.graph_exec_id, data.node_exec_id)
log_metadata = get_log_metadata(
graph_eid=data.graph_exec_id,
graph_id=data.graph_id,
node_eid=data.node_exec_id,
node_id=data.node_id,
block_name="-",
)
timing_info, _ = cls._on_node_execution(q, data, log_metadata)
metric_node_timing("walltime", timing_info.wall_time, tags=log_metadata)
metric_node_timing("cputime", timing_info.cpu_time, tags=log_metadata)
cls.loop.run_until_complete(
update_node_execution_stats(
data.node_exec_id,
{
"walltime": timing_info.wall_time,
"cputime": timing_info.cpu_time,
},
)
)
@classmethod
@time_measured
def _on_node_execution(
cls, q: ExecutionQueue[NodeExecution], data: NodeExecution, log_metadata: dict
):
try:
cls.logger.info(
"Start node execution",
@@ -402,6 +458,8 @@ class Executor:
def on_graph_executor_start(cls):
configure_logging()
cls.logger = logging.getLogger("graph_executor")
cls.loop = asyncio.new_event_loop()
cls.loop.run_until_complete(db.connect())
cls.pool_size = Config().num_node_workers
cls.executor = ProcessPoolExecutor(
max_workers=cls.pool_size,
@@ -410,8 +468,34 @@ class Executor:
cls.logger.info(f"Graph executor started with max-{cls.pool_size} node workers")
@classmethod
def on_graph_execution(cls, graph_data: GraphExecution):
log_metadata = get_log_metadata(graph_data.graph_exec_id, "*")
@error_logged
def on_graph_execution(cls, data: GraphExecution):
log_metadata = get_log_metadata(
graph_eid=data.graph_exec_id,
graph_id=data.graph_id,
node_id="*",
node_eid="*",
block_name="-",
)
timing_info, node_count = cls._on_graph_execution(data, log_metadata)
metric_graph_timing("walltime", timing_info.wall_time, tags=log_metadata)
metric_graph_timing("cputime", timing_info.cpu_time, tags=log_metadata)
metric_graph_count("nodecount", node_count, tags=log_metadata)
cls.loop.run_until_complete(
update_graph_execution_stats(
data.graph_exec_id,
{
"walltime": timing_info.wall_time,
"cputime": timing_info.cpu_time,
"nodecount": node_count,
},
)
)
@classmethod
@time_measured
def _on_graph_execution(cls, graph_data: GraphExecution, log_metadata: dict) -> int:
cls.logger.info(
"Start graph execution",
extra={
@@ -420,6 +504,7 @@ class Executor:
}
},
)
node_executed = 0
try:
queue = ExecutionQueue[NodeExecution]()
@@ -447,6 +532,7 @@ class Executor:
while queue.empty() and futures:
for node_id, future in list(futures.items()):
if future.done():
node_executed += 1
del futures[node_id]
elif queue.empty():
cls.wait_future(future)
@@ -469,6 +555,8 @@ class Executor:
},
)
return node_executed
@classmethod
def wait_future(cls, future: Future, timeout: int | None = 3):
try:
@@ -539,6 +627,7 @@ class ExecutionManager(AppService):
starting_node_execs.append(
NodeExecution(
graph_exec_id=node_exec.graph_exec_id,
graph_id=node_exec.graph_id,
node_exec_id=node_exec.node_exec_id,
node_id=node_exec.node_id,
data=node_exec.input_data,
@@ -552,6 +641,7 @@ class ExecutionManager(AppService):
self.agent_server_client.send_execution_update(exec_update.model_dump())
graph_exec = GraphExecution(
graph_id=graph_id,
graph_exec_id=graph_exec_id,
start_node_execs=starting_node_execs,
)

View File

@@ -0,0 +1,66 @@
import functools
import logging
import os
import time
from typing import Callable, Tuple, TypeVar
from pydantic import BaseModel
class TimingInfo(BaseModel):
    """Wall-clock and CPU time consumed by a measured call, in seconds."""

    # Process CPU time (user + system) spent during the call.
    cpu_time: float
    # Elapsed real time during the call.
    wall_time: float
def _start_measurement() -> Tuple[float, float]:
    """Return (wall_time, cpu_time) snapshots for the current process.

    CPU time is user + system time of this process (child processes are
    excluded). os.times() is called once so both components come from the
    same snapshot; calling it twice (as before) allowed the two reads to
    straddle a scheduler tick.
    """
    cpu = os.times()
    return time.time(), cpu.user + cpu.system
def _end_measurement(
    start_wall_time: float, start_cpu_time: float
) -> Tuple[float, float]:
    """Return (wall_duration, cpu_duration) in seconds since the given starts.

    os.times() is called once so the user and system components come from
    the same snapshot (previously it was read twice).
    """
    end_wall_time = time.time()
    cpu = os.times()
    return end_wall_time - start_wall_time, (cpu.user + cpu.system) - start_cpu_time
T = TypeVar("T")

logger = logging.getLogger(__name__)


def time_measured(func: Callable[..., T]) -> Callable[..., Tuple[TimingInfo, T]]:
    """
    Decorator to measure the time taken by a function to execute.

    The wrapped function returns a (TimingInfo, original_result) tuple.
    If the wrapped function raises, the exception propagates unchanged;
    the durations computed in the ``finally`` block are then discarded.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> Tuple[TimingInfo, T]:
        start_wall_time, start_cpu_time = _start_measurement()
        try:
            result = func(*args, **kwargs)
        finally:
            wall_duration, cpu_duration = _end_measurement(
                start_wall_time, start_cpu_time
            )
        timing_info = TimingInfo(cpu_time=cpu_duration, wall_time=wall_duration)
        return timing_info, result

    return wrapper


def error_logged(func: Callable[..., T]) -> Callable[..., T | None]:
    """
    Decorator to suppress and log any exceptions raised by a function.

    On error the exception is logged with its traceback and the wrapper
    returns None instead of raising.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> T | None:
        try:
            return func(*args, **kwargs)
        except Exception as e:
            # Lazy %-style arguments: the message is only rendered if the
            # record is actually emitted (the previous f-string always paid
            # the formatting cost, even with logging disabled).
            # NOTE(review): args/kwargs may be large or sensitive — consider
            # trimming or redacting them before logging in production.
            logger.exception(
                "Error when calling function %s with arguments %s %s: %s",
                func.__name__,
                args,
                kwargs,
                e,
            )
            return None

    return wrapper

View File

@@ -0,0 +1,38 @@
import sentry_sdk
from sentry_sdk import metrics
from autogpt_server.util.settings import Settings
# NOTE(review): Settings() is constructed and sentry_sdk.init() runs at import
# time — merely importing this module starts Sentry with 100% trace/profile
# sampling, which can be costly in production; consider lazy/configurable init.
sentry_dsn = Settings().secrets.sentry_dsn
sentry_sdk.init(dsn=sentry_dsn, traces_sample_rate=1.0, profiles_sample_rate=1.0)
def emit_distribution(
    name: str,
    key: str,
    value: float,
    unit: str = "none",
    tags: dict[str, str] | None = None,
) -> None:
    """Record a Sentry distribution metric named '<name>__<key>' with the given tags."""
    metrics.distribution(
        key=f"{name}__{key}",
        value=value,
        unit=unit,
        tags=tags or {},
    )
def metric_node_payload(key: str, value: float, tags: dict[str, str]) -> None:
    # Node-level payload-size metric, recorded in bytes.
    emit_distribution("NODE_EXECUTION", key, value, unit="byte", tags=tags)
def metric_node_timing(key: str, value: float, tags: dict[str, str]) -> None:
    # Node-level timing metric (e.g. walltime/cputime), recorded in seconds.
    emit_distribution("NODE_EXECUTION", key, value, unit="second", tags=tags)
def metric_graph_count(key: str, value: int, tags: dict[str, str]) -> None:
    # Graph-level counter (e.g. nodecount); unit is left at the default "none".
    emit_distribution("GRAPH_EXECUTION", key, value, tags=tags)
def metric_graph_timing(key: str, value: float, tags: dict[str, str]) -> None:
    # Graph-level timing metric (e.g. walltime/cputime), recorded in seconds.
    emit_distribution("GRAPH_EXECUTION", key, value, unit="second", tags=tags)

View File

@@ -117,6 +117,8 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
smtp_username: str = Field(default="", description="SMTP username")
smtp_password: str = Field(default="", description="SMTP password")
sentry_dsn: str = Field(default="", description="Sentry DSN")
# Add more secret fields as needed
model_config = SettingsConfigDict(

View File

@@ -0,0 +1,5 @@
-- AlterTable
-- Nullable JSON-serialized stats blob (e.g. walltime/cputime/nodecount) per graph run.
ALTER TABLE "AgentGraphExecution" ADD COLUMN "stats" TEXT;
-- AlterTable
-- Nullable JSON-serialized stats blob (e.g. walltime/cputime) per node run.
ALTER TABLE "AgentNodeExecution" ADD COLUMN "stats" TEXT;

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "agpt"
@@ -1004,65 +1004,6 @@ files = [
python-dateutil = "*"
pytz = ">2021.1"
[[package]]
name = "cx_Freeze"
version = "7.1.1"
description = "Create standalone executables from Python scripts"
optional = false
python-versions = ">=3.8"
files = []
develop = true
[package.dependencies]
cx_Logging = {version = ">=3.1", markers = "sys_platform == \"win32\""}
dmgbuild = {version = ">=1.6.1", markers = "sys_platform == \"darwin\""}
filelock = {version = ">=3.12.3", markers = "sys_platform == \"linux\""}
lief = {version = ">=0.12.0,<0.15.0", markers = "sys_platform == \"win32\""}
patchelf = {version = ">=0.14", markers = "sys_platform == \"linux\" and (platform_machine == \"x86_64\" or platform_machine == \"i686\" or platform_machine == \"aarch64\" or platform_machine == \"armv7l\" or platform_machine == \"ppc64le\" or platform_machine == \"s390x\")"}
setuptools = ">=65.6.3,<71"
wheel = ">=0.42.0,<=0.43.0"
[package.extras]
dev = ["bump-my-version (==0.24.0)", "cibuildwheel (==2.19.1)", "pre-commit (==3.5.0)", "pre-commit (==3.7.1)"]
doc = ["furo (==2024.5.6)", "myst-parser (==3.0.1)", "sphinx (==7.3.7)", "sphinx-new-tab-link (==0.4.0)", "sphinx-tabs (==3.4.5)"]
test = ["coverage (==7.5.4)", "pluggy (==1.5.0)", "pytest (==8.2.2)", "pytest-cov (==5.0.0)", "pytest-datafiles (==3.0.0)", "pytest-mock (==3.14.0)", "pytest-timeout (==2.3.1)", "pytest-xdist[psutil] (==3.6.1)"]
[package.source]
type = "git"
url = "https://github.com/ntindle/cx_Freeze.git"
reference = "main"
resolved_reference = "876fe77c97db749b7b0aed93c12142a7226ee7e4"
[[package]]
name = "cx-logging"
version = "3.2.0"
description = "Python and C interfaces for logging"
optional = false
python-versions = ">=3.8"
files = [
{file = "cx_Logging-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d40c1a4dcd54eff3335f34a2f9af9e29f29b45441fa99f90d6710ffae130a826"},
{file = "cx_Logging-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705c1baf94ac49ba7bc807deeef18e1613b27ebdfacb15ab5702ec07f5f04446"},
{file = "cx_Logging-3.2.0-cp310-cp310-win32.whl", hash = "sha256:3a0e153c9ed70ea5f8cea27c34052fd815d93d0858c7867f926274b5ccb04ee5"},
{file = "cx_Logging-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:20ff0e7ee0da525ca8b31bd4af49819aee220307896f98a68d8794a5c1f6fde2"},
{file = "cx_Logging-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369dc509e65ceb3244281e89a8db99a7d67bde524d7bc9ab650282d44e153a06"},
{file = "cx_Logging-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e7713ffed11d7020b35623470c24af3223175ae38ea93797712e55219648e1"},
{file = "cx_Logging-3.2.0-cp311-cp311-win32.whl", hash = "sha256:77449b6c48902ec24d876643d66a37d841c6aa425f4f0ae7ff64bf53e3565e11"},
{file = "cx_Logging-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0cfcd5c88694e062465db50410be53ce5a957ad3d50b90a8f4bd03c98f73c7b"},
{file = "cx_Logging-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:031fe74ca61bf6084d64e835a7801b3c8d8758737fcadae234ba8b09a16d1368"},
{file = "cx_Logging-3.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d29550bec4fdd7ce05b23ae6d3539a33060feb24ff39d5cbb89925d59b018b7"},
{file = "cx_Logging-3.2.0-cp312-cp312-win32.whl", hash = "sha256:4328097f6034be241e02146af8e199382e7f30019272c26768e4cd3e5122d3f9"},
{file = "cx_Logging-3.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2496386a36f63233fe77e7e68539910b9429df2f922be1af71309be59dca11f3"},
{file = "cx_Logging-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03b725bbbe455a87c92c125e246a8c0c1f44bb20c7af0048e4c209cfc2a1a180"},
{file = "cx_Logging-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac2957603ec1b1d3f7274b834ade52fb3004e0d4b9e3414b8593c60de5e3cf39"},
{file = "cx_Logging-3.2.0-cp38-cp38-win32.whl", hash = "sha256:08bf89fa288c4aae9ad34534813aea1e275beb052bc40c7e4ef76862954c61b7"},
{file = "cx_Logging-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:e7c883b1cfd76a44ded9677333bebb01e50970b52bdc5746cc2f45a661cebb7a"},
{file = "cx_Logging-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00f23737f81d6b88a74b4f33bce5ea114283aab153a0d6c290b0626e5deb7978"},
{file = "cx_Logging-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f40486414ba63fef003167c6942a08fd699b3197d359ca56441ddf2d82a7e6d"},
{file = "cx_Logging-3.2.0-cp39-cp39-win32.whl", hash = "sha256:7d85861c70012c24415e488cff14ec6ba029d097b4d2b4d3effd6a5b1d5ce287"},
{file = "cx_Logging-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:aafd5c2156751e473b4a8d331d01e70e872b235068da2158a29e377777baac63"},
{file = "cx_Logging-3.2.0.tar.gz", hash = "sha256:bdbad6d2e6a0cc5bef962a34d7aa1232e88ea9f3541d6e2881675b5e7eab5502"},
]
[[package]]
name = "cymem"
version = "2.0.8"
@@ -1177,27 +1118,6 @@ files = [
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
]
[[package]]
name = "dmgbuild"
version = "1.6.1"
description = "macOS command line utility to build disk images"
optional = false
python-versions = ">=3.7"
files = [
{file = "dmgbuild-1.6.1-py3-none-any.whl", hash = "sha256:45dba6af4a64872c6a91eb335ebeaf5e1f4f4f39c89fd77cf40e841bd1226166"},
{file = "dmgbuild-1.6.1.tar.gz", hash = "sha256:7ced2603d684e29c22b4cd507d1e15a1907e91b86259924b8cfe480d80553b43"},
]
[package.dependencies]
ds-store = ">=1.1.0"
mac-alias = ">=2.0.1"
[package.extras]
badge-icons = ["pyobjc-framework-Quartz (>=3.0.4)"]
dev = ["pre-commit", "tox"]
docs = ["sphinx", "sphinx-autobuild", "sphinx-rtd-theme"]
test = ["pytest", "pytest-cov", "pytest-tldr"]
[[package]]
name = "docker"
version = "7.1.0"
@@ -1220,25 +1140,6 @@ docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"]
ssh = ["paramiko (>=2.4.3)"]
websockets = ["websocket-client (>=1.3.0)"]
[[package]]
name = "ds-store"
version = "1.3.1"
description = "Manipulate Finder .DS_Store files from Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "ds_store-1.3.1-py3-none-any.whl", hash = "sha256:fbacbb0bd5193ab3e66e5a47fff63619f15e374ffbec8ae29744251a6c8f05b5"},
{file = "ds_store-1.3.1.tar.gz", hash = "sha256:c27d413caf13c19acb85d75da4752673f1f38267f9eb6ba81b3b5aa99c2d207c"},
]
[package.dependencies]
mac-alias = ">=2.0.1"
[package.extras]
dev = ["pre-commit", "tox"]
docs = ["sphinx", "sphinx-autobuild", "sphinx-rtd-theme"]
test = ["pytest", "pytest-cov", "pytest-tldr"]
[[package]]
name = "duckduckgo-search"
version = "6.2.1"
@@ -2502,45 +2403,6 @@ marisa-trie = ">=0.7.7"
build = ["build", "twine"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "lief"
version = "0.14.1"
description = "Library to instrument executable formats"
optional = false
python-versions = ">=3.8"
files = [
{file = "lief-0.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a9a94882f9af110fb01b4558a58941d2352b9a4ae3fef15570a3fab921ff462"},
{file = "lief-0.14.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:bcc06f24f64fa6f20372d625ce60c40a7a6f669e11bdd02c2f0b8c5c6d09a5ee"},
{file = "lief-0.14.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d22f804eee7f1b4a4b37e7a3d35e2003c4c054f3450d40389e54c8ac9fc2a5db"},
{file = "lief-0.14.1-cp310-cp310-manylinux_2_28_x86_64.manylinux_2_27_x86_64.whl", hash = "sha256:26134815adecfd7f15dfbdf12cc64df25bcf3d0db917cf115fc3b296d09be496"},
{file = "lief-0.14.1-cp310-cp310-win32.whl", hash = "sha256:6ca0220189698599df30b8044f43fb1fc7ba919fb9ef6047c892f9faee16393a"},
{file = "lief-0.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:c321234b50997c217107c09e69f53518c37fac637f8735c968c258dd4c748fb2"},
{file = "lief-0.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ca365c704c6b6b1ce631b92fea2eddaf93d66c897a0ec4ab51e9ab9e3345920"},
{file = "lief-0.14.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:1f3c40eadff07a4c8fa74f1e268f9fa70b68f39b6795a00cd82160ca6782d5c3"},
{file = "lief-0.14.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:c202ed13b641db2e1f8a24743fb0c85595b32ea92cc3c517d3f7a9977e16dcb4"},
{file = "lief-0.14.1-cp311-cp311-manylinux_2_28_x86_64.manylinux_2_27_x86_64.whl", hash = "sha256:fd481bfdfef04e8be4d200bca771d0d9394d9146c6cd403f9e58c80c4196a24e"},
{file = "lief-0.14.1-cp311-cp311-win32.whl", hash = "sha256:473e9a37beef8db8bab1a777271aa49cce44dfe35af65cb8fad576377518c0bd"},
{file = "lief-0.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:24f687244e14d4a8307430babc5c712a1dd4e519172886ad4aeb9825f88f7569"},
{file = "lief-0.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6df40e3750b8b26f88a6b28ac01db7338cdb6158f28363c755bf36452ce20d28"},
{file = "lief-0.14.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:e7f7a55db2fcf269569f9e9fa5ea752620396de17bd9d29fc8b29e176975ecdb"},
{file = "lief-0.14.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:50795b51884b76a78c481d6d069d992561c217180bd81cf12554180389eff0a3"},
{file = "lief-0.14.1-cp312-cp312-manylinux_2_28_x86_64.manylinux_2_27_x86_64.whl", hash = "sha256:497b88f9c9aaae999766ba188744ee35c5f38b4b64016f7dbb7037e9bf325382"},
{file = "lief-0.14.1-cp312-cp312-win32.whl", hash = "sha256:08bad88083f696915f8dcda4042a3bfc514e17462924ec8984085838b2261921"},
{file = "lief-0.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:e131d6158a085f8a72124136816fefc29405c725cd3695ce22a904e471f0f815"},
{file = "lief-0.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:df650fa05ca131e4dfeb42c77985e1eb239730af9944bc0aadb1dfac8576e0e8"},
{file = "lief-0.14.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b4e76eeb48ca2925c6ca6034d408582615f2faa855f9bb11482e7acbdecc4803"},
{file = "lief-0.14.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:016e4fac91303466024154dd3c4b599e8b7c52882f72038b62a2be386d98c8f9"},
{file = "lief-0.14.1-cp38-cp38-manylinux_2_28_x86_64.manylinux_2_27_x86_64.whl", hash = "sha256:9a5c7732a3ce53b306c8180ab64fdfb36d8cd9df91aedd9e2b4dad9faf47492b"},
{file = "lief-0.14.1-cp38-cp38-win32.whl", hash = "sha256:7030c22a4446ea2ac673fd50128e9c639121c0a4dae11ca1cd8cc20d62d26e7e"},
{file = "lief-0.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a35ceeee74bb9bb4c7171f4bca814576a3aa6dec16a0a9469e5743db0a9ba0c"},
{file = "lief-0.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abb15e4de34e70661fd35e87e2634abf0ae57a8c8ac78d02ad4259f5a5817e26"},
{file = "lief-0.14.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:33d062340c709c1a33539d221ea3cb764cbb8d7c9ee8aae28bf9797bc8715a0b"},
{file = "lief-0.14.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:66deb1b26de43acb2fd0b2fc5e6be70093eaaa93797332cc4613e163164c77e7"},
{file = "lief-0.14.1-cp39-cp39-manylinux_2_28_x86_64.manylinux_2_27_x86_64.whl", hash = "sha256:c1c15bd3e5b15da6dcc0ba75d5549f15bfbf9214c0d8e3938f85877a40c352d9"},
{file = "lief-0.14.1-cp39-cp39-win32.whl", hash = "sha256:ebcbe4eadd33d8cf2c6015f44d6c9b72f81388af745938e633c4bb90262b2036"},
{file = "lief-0.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:2db3eb282a35daf51f89c6509226668a08fb6a6d1f507dd549dd9f077585db11"},
]
[[package]]
name = "litellm"
version = "1.41.28"
@@ -2727,22 +2589,6 @@ html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=3.0.10)"]
[[package]]
name = "mac-alias"
version = "2.2.2"
description = "Generate/parse Mac OS Alias records from Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "mac_alias-2.2.2-py3-none-any.whl", hash = "sha256:504ab8ac546f35bbd75ad014d6ad977c426660aa721f2cd3acf3dc2f664141bd"},
{file = "mac_alias-2.2.2.tar.gz", hash = "sha256:c99c728eb512e955c11f1a6203a0ffa8883b26549e8afe68804031aa5da856b7"},
]
[package.extras]
dev = ["pre-commit", "tox"]
docs = ["sphinx", "sphinx-autobuild", "sphinx-rtd-theme"]
test = ["pytest", "pytest-cov", "pytest-tldr"]
[[package]]
name = "marisa-trie"
version = "1.2.0"
@@ -3583,8 +3429,6 @@ files = [
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"},
{file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"},
{file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"},
{file = "orjson-3.10.6-cp313-none-win32.whl", hash = "sha256:efdf2c5cde290ae6b83095f03119bdc00303d7a03b42b16c54517baa3c4ca3d0"},
{file = "orjson-3.10.6-cp313-none-win_amd64.whl", hash = "sha256:8e190fe7888e2e4392f52cafb9626113ba135ef53aacc65cd13109eb9746c43e"},
{file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"},
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"},
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"},
@@ -3670,25 +3514,6 @@ files = [
{file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"},
]
[[package]]
name = "patchelf"
version = "0.17.2.1"
description = "A small utility to modify the dynamic linker and RPATH of ELF executables."
optional = false
python-versions = "*"
files = [
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:fc329da0e8f628bd836dfb8eaf523547e342351fa8f739bf2b3fe4a6db5a297c"},
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:ccb266a94edf016efe80151172c26cff8c2ec120a57a1665d257b0442784195d"},
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.musllinux_1_1_ppc64le.whl", hash = "sha256:f47b5bdd6885cfb20abdd14c707d26eb6f499a7f52e911865548d4aa43385502"},
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_17_s390x.manylinux2014_s390x.musllinux_1_1_s390x.whl", hash = "sha256:a9e6ebb0874a11f7ed56d2380bfaa95f00612b23b15f896583da30c2059fcfa8"},
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.musllinux_1_1_i686.whl", hash = "sha256:3c8d58f0e4c1929b1c7c45ba8da5a84a8f1aa6a82a46e1cfb2e44a4d40f350e5"},
{file = "patchelf-0.17.2.1-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.musllinux_1_1_x86_64.whl", hash = "sha256:d1a9bc0d4fd80c038523ebdc451a1cce75237cfcc52dbd1aca224578001d5927"},
{file = "patchelf-0.17.2.1.tar.gz", hash = "sha256:a6eb0dd452ce4127d0d5e1eb26515e39186fa609364274bc1b0b77539cfa7031"},
]
[package.extras]
test = ["importlib-metadata", "pytest"]
[[package]]
name = "pathspec"
version = "0.12.1"
@@ -6399,20 +6224,6 @@ files = [
{file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"},
]
[[package]]
name = "wheel"
version = "0.43.0"
description = "A built-package format for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"},
{file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"},
]
[package.extras]
test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
[[package]]
name = "wrapt"
version = "1.16.0"
@@ -6641,4 +6452,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "685689797ef05f362356f9e105459010c90551a4e327e49b2bb57db21792f2d2"
content-hash = "126731188e8fdc7df0bd2dc92cd069fcd2b90edd5e1065cebd8f4adcedf982b5"

View File

@@ -0,0 +1,5 @@
-- AlterTable
-- Nullable JSON-serialized stats blob (e.g. walltime/cputime/nodecount) per graph run.
ALTER TABLE "AgentGraphExecution" ADD COLUMN "stats" TEXT;
-- AlterTable
-- Nullable JSON-serialized stats blob (e.g. walltime/cputime) per node run.
ALTER TABLE "AgentNodeExecution" ADD COLUMN "stats" TEXT;

View File

@@ -12,11 +12,11 @@ generator client {
// User model to mirror Auth provider users
model User {
id String @id // This should match the Supabase user ID
email String @unique
name String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id // This should match the Supabase user ID
email String @unique
name String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// Relations
AgentGraphs AgentGraph[]
@@ -124,6 +124,8 @@ model AgentGraphExecution {
// Link to User model
userId String
user User @relation(fields: [userId], references: [id])
stats String? // JSON serialized object
}
// This model describes the execution of an AgentNode.
@@ -148,6 +150,8 @@ model AgentNodeExecution {
queuedTime DateTime?
startedTime DateTime?
endedTime DateTime?
stats String? // JSON serialized object
}
// This model describes the output of an AgentNodeExecution.

View File

@@ -46,9 +46,9 @@ youtube-transcript-api = "^0.6.2"
aio-pika = "^9.4.3"
redis = "^5.0.8"
sentry-sdk = "1.45.0"
[tool.poetry.group.dev.dependencies]
cx-freeze = { git = "https://github.com/ntindle/cx_Freeze.git", rev = "main", develop = true }
poethepoet = "^0.26.1"
httpx = "^0.27.0"
pytest-watcher = "^0.4.2"

View File

@@ -11,11 +11,11 @@ generator client {
// User model to mirror Auth provider users
model User {
id String @id // This should match the Supabase user ID
email String @unique
name String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id // This should match the Supabase user ID
email String @unique
name String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// Relations
AgentGraphs AgentGraph[]
@@ -123,6 +123,8 @@ model AgentGraphExecution {
// Link to User model
userId String
user User @relation(fields: [userId], references: [id])
stats String? // JSON serialized object
}
// This model describes the execution of an AgentNode.
@@ -147,6 +149,8 @@ model AgentNodeExecution {
queuedTime DateTime?
startedTime DateTime?
endedTime DateTime?
stats String? // JSON serialized object
}
// This model describes the output of an AgentNodeExecution.
@@ -187,4 +191,4 @@ model AgentGraphExecutionSchedule {
user User @relation(fields: [userId], references: [id])
@@index([isEnabled])
}
}

View File

@@ -1,141 +0,0 @@
import platform
from pathlib import Path
from pkgutil import iter_modules
from typing import Union
from cx_Freeze import Executable, setup # type: ignore
packages = [
m.name
for m in iter_modules()
if m.ispkg and m.module_finder and "poetry" in m.module_finder.path # type: ignore
]
packages.append("collections")
packages.append("autogpt_server.util.service")
packages.append("autogpt_server.executor.manager")
packages.append("autogpt_server.util.service")
# set the icon based on the platform
icon = "../../assets/gpt_dark_RGB.ico"
if platform.system() == "Darwin":
icon = "../../assets/gpt_dark_RGB.icns"
elif platform.system() == "Linux":
icon = "../../assets/gpt_dark_RGB.png"
def txt_to_rtf(input_file: Union[str, Path], output_file: Union[str, Path]) -> None:
"""
Convert a text file to RTF format.
Args:
input_file (Union[str, Path]): Path to the input text file.
output_file (Union[str, Path]): Path to the output RTF file.
Returns:
None
"""
input_path = Path(input_file)
output_path = Path(output_file)
with input_path.open("r", encoding="utf-8") as txt_file:
content = txt_file.read()
# RTF header
rtf = r"{\rtf1\ansi\deff0 {\fonttbl {\f0 Times New Roman;}}\f0\fs24 "
# Replace newlines with RTF newline
rtf += content.replace("\n", "\\par ")
# Close RTF document
rtf += "}"
with output_path.open("w", encoding="utf-8") as rtf_file:
rtf_file.write(rtf)
# Convert LICENSE to LICENSE.rtf
license_file = "LICENSE.rtf"
txt_to_rtf("../../LICENSE", license_file)
setup(
name="AutoGPT Server",
url="https://agpt.co",
# The entry points of the application
executables=[
Executable(
"autogpt_server/app.py",
target_name="agpt_server",
base="console",
icon=icon,
),
Executable(
"autogpt_server/cli.py",
target_name="agpt_server_cli",
base="console",
icon=icon,
),
],
options={
# Options for building all the executables
"build_exe": {
"packages": packages,
"includes": [
"autogpt_server",
"prisma",
],
# Exclude the two module from readability.compat as it causes issues
"excludes": ["readability.compat.two"],
"include_files": [
# source, destination in the bundle
# (../frontend, example_files) would also work, but you'd need to load
# the frontend differently in the data.py to correctly
# get the path when frozen.
("../example_files", "example_files"),
],
},
# Mac .app specific options
"bdist_mac": {
"bundle_name": "AutoGPT",
"iconfile": "../../assets/gpt_dark_RGB.icns",
},
# Mac .dmg specific options
"bdist_dmg": {
"applications_shortcut": True,
"volume_label": "AutoGPTServer",
"background": "builtin-arrow",
"license": {
"default-language": "en_US",
"licenses": {"en_US": license_file},
"buttons": {
"en_US": [
"English",
"Agree",
"Disagree",
"Print",
"Save",
"If you agree, click Agree to continue the installation. If "
"you do not agree, click Disagree to cancel the installation.",
]
},
},
},
# Windows .msi specific options
"bdist_msi": {
"target_name": "AutoGPTServer",
"add_to_path": True,
"install_icon": "../../assets/gpt_dark_RGB.ico",
"license_file": license_file,
},
# Linux .appimage specific options
"bdist_appimage": {},
# Linux rpm specific options
"bdist_rpm": {
"name": "AutoGPTServer",
"description": "AutoGPT Server",
"version": "0.1",
"license": "UNKNOWNORPROPRIETARY",
"url": "https://agpt.co",
"long_description": "AutoGPT Server",
},
},
)

View File

@@ -0,0 +1,26 @@
import time
from autogpt_server.util.decorator import error_logged, time_measured
@time_measured
def example_function(a: int, b: int, c: int) -> int:
    # Sleeps so the test can assert a lower bound on the measured wall time.
    time.sleep(0.5)
    return a + b + c
@error_logged
def example_function_with_error(a: int, b: int, c: int) -> int:
    # Always raises; error_logged should swallow this and return None.
    raise ValueError("This is a test error")
def test_timer_decorator():
    # time_measured wraps the result in (TimingInfo, original_return).
    # Wall time should be at least ~0.5s (0.4 allows for timer slack);
    # CPU time of a sleep is merely non-negative.
    info, res = example_function(1, 2, 3)
    assert info.cpu_time >= 0
    assert info.wall_time >= 0.4
    assert res == 6
def test_error_decorator():
    # The raised ValueError is logged and suppressed; the wrapper returns None.
    res = example_function_with_error(1, 2, 3)
    assert res is None

View File

@@ -1,9 +1,6 @@
from autogpt_server.util.service import (
AppService,
PyroNameServer,
expose,
get_service_client,
)
import pytest
from autogpt_server.util.service import AppService, expose, get_service_client
class TestService(AppService):
@@ -29,10 +26,10 @@ class TestService(AppService):
return self.run_and_wait(add_async(a, b))
def test_service_creation():
with PyroNameServer():
with TestService():
client = get_service_client(TestService)
assert client.add(5, 3) == 8
assert client.subtract(10, 4) == 6
assert client.fun_with_async(5, 3) == 8
@pytest.mark.asyncio(scope="session")
async def test_service_creation(server):
with TestService():
client = get_service_client(TestService)
assert client.add(5, 3) == 8
assert client.subtract(10, 4) == 6
assert client.fun_with_async(5, 3) == 8