Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-10 07:38:04 -05:00)

Merge branch 'dev' into kpczerwinski/open-1998-ux-make-some-text-non-selectable-on-the-builder-page
@@ -22,7 +22,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       - name: Set up Python
         uses: actions/setup-python@v5
@@ -55,7 +55,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -26,7 +26,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4

       - name: Set up Python
         uses: actions/setup-python@v5
@@ -59,7 +59,7 @@ jobs:

     steps:
       - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -22,7 +22,7 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
           fetch-depth: 0

@@ -28,9 +28,9 @@ To run the AutoGPT Platform, follow these steps:

 3. Run the following command:
    ```
-   cp supabase/docker/.env.example .env.local
+   cp supabase/docker/.env.example .env
    ```
-   This command will copy the `.env.example` file to `.env.local` in the `supabase/docker` directory. You can modify the `.env.local` file to add your own environment variables.
+   This command will copy the `.env.example` file to `.env` in the `supabase/docker` directory. You can modify the `.env` file to add your own environment variables.

 4. Run the following command:
    ```
@@ -46,9 +46,9 @@ To run the AutoGPT Platform, follow these steps:

 6. Run the following command:
    ```
-   cp .env.example .env
+   cp .env.example .env.local
    ```
-   This command will copy the `.env.example` file to `.env` in the `frontend` directory. You can modify the `.env` within this folder to add your own environment variables for the frontend application.
+   This command will copy the `.env.example` file to `.env.local` in the `frontend` directory. You can modify the `.env.local` within this folder to add your own environment variables for the frontend application.

 7. Run the following command:
    ```
autogpt_platform/autogpt_libs/poetry.lock (generated, 14 lines changed)
@@ -1189,13 +1189,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

 [[package]]
 name = "pydantic-settings"
-version = "2.6.0"
+version = "2.6.1"
 description = "Settings management using Pydantic"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"},
-    {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"},
+    {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
+    {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
 ]

 [package.dependencies]
@@ -1271,13 +1271,13 @@ websockets = ">=11,<13"

 [[package]]
 name = "redis"
-version = "5.1.1"
+version = "5.2.0"
 description = "Python client for Redis database and key-value store"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "redis-5.1.1-py3-none-any.whl", hash = "sha256:f8ea06b7482a668c6475ae202ed8d9bcaa409f6e87fb77ed1043d912afd62e24"},
-    {file = "redis-5.1.1.tar.gz", hash = "sha256:f6c997521fedbae53387307c5d0bf784d9acc28d9f1d058abeac566ec4dbed72"},
+    {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
+    {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
 ]

 [package.dependencies]
@@ -1724,4 +1724,4 @@ type = ["pytest-mypy"]

 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<4.0"
-content-hash = "44af7722ca3d2788fc817129ac43477b71eea9921d51502a63f755cb04e3f254"
+content-hash = "f80654aae542b1f2f3a44a01f197f87ffbaea52f474dd2cc2b72b8d56b155563"
@@ -11,14 +11,14 @@ colorama = "^0.4.6"
 expiringdict = "^1.2.2"
 google-cloud-logging = "^3.11.3"
 pydantic = "^2.9.2"
-pydantic-settings = "^2.6.0"
+pydantic-settings = "^2.6.1"
 pyjwt = "^2.8.0"
 python = ">=3.10,<4.0"
 python-dotenv = "^1.0.1"
 supabase = "^2.9.1"

 [tool.poetry.group.dev.dependencies]
-redis = "^5.0.8"
+redis = "^5.2.0"

 [build-system]
 requires = ["poetry-core"]
@@ -143,8 +143,11 @@ class AgentInputBlock(Block):
     """

     class Input(BlockSchema):
-        value: Any = SchemaField(description="The value to be passed as input.")
         name: str = SchemaField(description="The name of the input.")
+        value: Any = SchemaField(
+            description="The value to be passed as input.",
+            default=None,
+        )
+        description: str = SchemaField(
+            description="The description of the input.",
+            default="",
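The hunk above makes `value` optional (default `None`) and adds an optional `description` field, so an input node can be defined before a value is wired in. A minimal sketch of the behavioral effect, using plain pydantic `Field` as a stand-in for the project's `SchemaField` helper (an assumption, not the actual implementation):

```python
# Sketch: a field with no default is required; adding default=None makes it optional.
from typing import Any
from pydantic import BaseModel, Field, ValidationError

class InputBefore(BaseModel):
    value: Any = Field(description="The value to be passed as input.")  # required
    name: str = Field(description="The name of the input.")

class InputAfter(BaseModel):
    name: str = Field(description="The name of the input.")
    value: Any = Field(default=None, description="The value to be passed as input.")
    description: str = Field(default="", description="The description of the input.")

try:
    InputBefore(name="query")  # raises: `value` has no default, so it is required
except ValidationError as e:
    print("before:", e.errors()[0]["type"])  # -> "missing"

print("after:", InputAfter(name="query").value)  # -> None, now optional
```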
@@ -10,7 +10,7 @@ from prisma.types import AgentGraphInclude
 from pydantic import BaseModel
 from pydantic_core import PydanticUndefinedType

-from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
+from backend.blocks.basic import AgentInputBlock, AgentOutputBlock, BlockType
 from backend.data.block import BlockInput, get_block, get_blocks
 from backend.data.db import BaseDbModel, transaction
 from backend.data.execution import ExecutionStatus
@@ -209,16 +209,15 @@ class Graph(GraphMeta):
             if block is None:
                 raise ValueError(f"Invalid block {node.block_id} for node #{node.id}")

-            if not for_run:
-                continue  # Skip input completion validation, unless when executing.
-
             provided_inputs = set(
                 [sanitize(name) for name in node.input_default]
                 + [sanitize(link.sink_name) for link in node.input_links]
             )
             for name in block.input_schema.get_required_fields():
-                if name not in provided_inputs and not isinstance(
-                    block, AgentInputBlock
+                if name not in provided_inputs and (
+                    for_run  # Skip input completion validation, unless when executing.
+                    or block.block_type == BlockType.INPUT
+                    or block.block_type == BlockType.OUTPUT
                 ):
                     raise ValueError(
                         f"Node {block.name} #{node.id} required input missing: `{name}`"
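Net effect of this hunk: the blanket `continue` that skipped all input validation outside of execution is gone; the raise condition itself now encodes when to check. A standalone sketch of the new predicate (assuming `BlockType` is an enum with `INPUT`/`OUTPUT` members, as the import change above suggests):

```python
# Sketch of the reworked check: outside a run (for_run=False), only INPUT and
# OUTPUT blocks are still validated for missing required inputs.
from enum import Enum

class BlockType(Enum):
    STANDARD = "Standard"
    INPUT = "Input"
    OUTPUT = "Output"

def missing_input_error(
    name: str, provided: set[str], for_run: bool, block_type: BlockType
) -> bool:
    return name not in provided and (
        for_run  # executing: every block's required inputs must be satisfied
        or block_type == BlockType.INPUT
        or block_type == BlockType.OUTPUT
    )

# A save-time (for_run=False) check now still flags an input block missing `name`:
print(missing_input_error("name", set(), False, BlockType.INPUT))     # True
# ...but leaves ordinary blocks alone until execution:
print(missing_input_error("data", set(), False, BlockType.STANDARD))  # False
```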
@@ -1,695 +1,125 @@
-import asyncio
-import inspect
+import contextlib
 import logging
-from collections import defaultdict
-from contextlib import asynccontextmanager
-from functools import wraps
-from typing import Annotated, Any, Dict
+import typing

+import fastapi
+import fastapi.responses
+import starlette.middleware.cors
 import uvicorn
-from autogpt_libs.auth.middleware import auth_middleware
-from autogpt_libs.utils.cache import thread_cached
-from fastapi import APIRouter, Body, Depends, FastAPI, HTTPException, Request
-from fastapi.middleware.cors import CORSMiddleware
-from fastapi.responses import JSONResponse
-from typing_extensions import TypedDict

-from backend.data import block, db
-from backend.data import execution as execution_db
-from backend.data import graph as graph_db
-from backend.data.block import BlockInput, CompletedBlockOutput
-from backend.data.credit import get_block_costs, get_user_credit_model
-from backend.data.user import get_or_create_user, migrate_and_encrypt_user_integrations
-from backend.executor import ExecutionManager, ExecutionScheduler
-from backend.server.model import CreateGraph, SetGraphActiveVersion
-from backend.util.service import AppService, get_service_client
-from backend.util.settings import AppEnvironment, Config, Settings
+import backend.data.block
+import backend.data.db
+import backend.data.user
+import backend.server.routers.v1
+import backend.util.service
+import backend.util.settings

-from .utils import get_user_id
-
-settings = Settings()
+settings = backend.util.settings.Settings()
 logger = logging.getLogger(__name__)


-class AgentServer(AppService):
-    _test_dependency_overrides = {}
-    _user_credit_model = get_user_credit_model()
+@contextlib.asynccontextmanager
+async def lifespan_context(app: fastapi.FastAPI):
+    await backend.data.db.connect()
+    await backend.data.block.initialize_blocks()
+    await backend.data.user.migrate_and_encrypt_user_integrations()
+    yield
+    await backend.data.db.disconnect()

-    def __init__(self):
-        super().__init__()
-        self.use_redis = True
-
-    @classmethod
-    def get_port(cls) -> int:
-        return Config().agent_server_port
-
-    @asynccontextmanager
-    async def lifespan(self, _: FastAPI):
-        await db.connect()
-        await block.initialize_blocks()
-        await migrate_and_encrypt_user_integrations()
-        yield
-        await db.disconnect()
-
-    def run_service(self):
-        docs_url = "/docs" if settings.config.app_env == AppEnvironment.LOCAL else None
-        app = FastAPI(
-            title="AutoGPT Agent Server",
-            description=(
-                "This server is used to execute agents that are created by the "
-                "AutoGPT system."
-            ),
-            summary="AutoGPT Agent Server",
-            version="0.1",
-            lifespan=self.lifespan,
-            docs_url=docs_url,
+def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
+    def handler(request: fastapi.Request, exc: Exception):
+        if log_error:
+            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
+        return fastapi.responses.JSONResponse(
+            content={
+                "message": f"{request.method} {request.url.path} failed",
+                "detail": str(exc),
+            },
+            status_code=status_code,
         )

-        if self._test_dependency_overrides:
-            app.dependency_overrides.update(self._test_dependency_overrides)
+    return handler

-        logger.debug(
-            f"FastAPI CORS allow origins: {Config().backend_cors_allow_origins}"
-        )
-
-        app.add_middleware(
-            CORSMiddleware,
-            allow_origins=Config().backend_cors_allow_origins,
+docs_url = (
+    "/docs"
+    if settings.config.app_env == backend.util.settings.AppEnvironment.LOCAL
+    else None
+)
+
+app = fastapi.FastAPI(
+    title="AutoGPT Agent Server",
+    description=(
+        "This server is used to execute agents that are created by the "
+        "AutoGPT system."
+    ),
+    summary="AutoGPT Agent Server",
+    version="0.1",
+    lifespan=lifespan_context,
+    docs_url=docs_url,
+)
+
+app.add_exception_handler(ValueError, handle_internal_http_error(400))
+app.add_exception_handler(500, handle_internal_http_error(500))
+app.include_router(backend.server.routers.v1.v1_router, tags=["v1"])


+@app.get(path="/health", tags=["health"], dependencies=[])
+async def health():
+    return {"status": "healthy"}


+class AgentServer(backend.util.service.AppProcess):
+    def run(self):
+        server_app = starlette.middleware.cors.CORSMiddleware(
+            app=app,
+            allow_origins=settings.config.backend_cors_allow_origins,
             allow_credentials=True,
             allow_methods=["*"],  # Allows all methods
             allow_headers=["*"],  # Allows all headers
         )

-        health_router = APIRouter()
-        health_router.add_api_route(
-            path="/health",
-            endpoint=self.health,
-            methods=["GET"],
-            tags=["health"],
-        )
-
-        # Define the API routes
-        api_router = APIRouter(prefix="/api")
-        api_router.dependencies.append(Depends(auth_middleware))
-
-        # Import & Attach sub-routers
-        import backend.server.integrations.router
-        import backend.server.routers.analytics
-
-        api_router.include_router(
-            backend.server.integrations.router.router,
-            prefix="/integrations",
-            tags=["integrations"],
-            dependencies=[Depends(auth_middleware)],
-        )
-
-        api_router.include_router(
-            backend.server.routers.analytics.router,
-            prefix="/analytics",
-            tags=["analytics"],
-            dependencies=[Depends(auth_middleware)],
-        )
-
-        api_router.add_api_route(
-            path="/auth/user",
-            endpoint=self.get_or_create_user_route,
-            methods=["POST"],
-            tags=["auth"],
-        )
-
-        api_router.add_api_route(
-            path="/blocks",
-            endpoint=self.get_graph_blocks,
-            methods=["GET"],
-            tags=["blocks"],
-        )
-        api_router.add_api_route(
-            path="/blocks/{block_id}/execute",
-            endpoint=self.execute_graph_block,
-            methods=["POST"],
-            tags=["blocks"],
-        )
-        api_router.add_api_route(
-            path="/graphs",
-            endpoint=self.get_graphs,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/templates",
-            endpoint=self.get_templates,
-            methods=["GET"],
-            tags=["templates", "graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs",
-            endpoint=self.create_new_graph,
-            methods=["POST"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/templates",
-            endpoint=self.create_new_template,
-            methods=["POST"],
-            tags=["templates", "graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}",
-            endpoint=self.get_graph,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/templates/{graph_id}",
-            endpoint=self.get_template,
-            methods=["GET"],
-            tags=["templates", "graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}",
-            endpoint=self.update_graph,
-            methods=["PUT"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/templates/{graph_id}",
-            endpoint=self.update_graph,
-            methods=["PUT"],
-            tags=["templates", "graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}",
-            endpoint=self.delete_graph,
-            methods=["DELETE"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/versions",
-            endpoint=self.get_graph_all_versions,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/templates/{graph_id}/versions",
-            endpoint=self.get_graph_all_versions,
-            methods=["GET"],
-            tags=["templates", "graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/versions/{version}",
-            endpoint=self.get_graph,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/versions/active",
-            endpoint=self.set_graph_active_version,
-            methods=["PUT"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/input_schema",
-            endpoint=self.get_graph_input_schema,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/execute",
-            endpoint=self.execute_graph,
-            methods=["POST"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/executions",
-            endpoint=self.list_graph_runs,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/executions/{graph_exec_id}",
-            endpoint=self.get_graph_run_node_execution_results,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
-            endpoint=self.stop_graph_run,
-            methods=["POST"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/schedules",
-            endpoint=self.create_schedule,
-            methods=["POST"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/{graph_id}/schedules",
-            endpoint=self.get_execution_schedules,
-            methods=["GET"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/graphs/schedules/{schedule_id}",
-            endpoint=self.update_schedule,
-            methods=["PUT"],
-            tags=["graphs"],
-        )
-        api_router.add_api_route(
-            path="/credits",
-            endpoint=self.get_user_credits,
-            methods=["GET"],
-        )
-
-        api_router.add_api_route(
-            path="/settings",
-            endpoint=self.update_configuration,
-            methods=["POST"],
-            tags=["settings"],
-        )
-
-        app.add_exception_handler(500, self.handle_internal_http_error)
-
-        app.include_router(api_router)
-        app.include_router(health_router)

         uvicorn.run(
-            app,
-            host=Config().agent_api_host,
-            port=Config().agent_api_port,
-            log_config=None,
+            server_app,
+            host=backend.util.settings.Config().agent_api_host,
+            port=backend.util.settings.Config().agent_api_port,
         )

+    @staticmethod
+    async def test_execute_graph(
+        graph_id: str, node_input: dict[typing.Any, typing.Any], user_id: str
+    ):
+        return await backend.server.routers.v1.execute_graph(
+            graph_id, node_input, user_id
+        )
+
+    @staticmethod
+    async def test_create_graph(
+        create_graph: backend.server.routers.v1.CreateGraph,
+        user_id: str,
+        is_template=False,
+    ):
+        return await backend.server.routers.v1.create_new_graph(create_graph, user_id)
+
+    @staticmethod
+    async def test_get_graph_run_status(
+        graph_id: str, graph_exec_id: str, user_id: str
+    ):
+        return await backend.server.routers.v1.get_graph_run_status(
+            graph_id, graph_exec_id, user_id
+        )
+
+    @staticmethod
+    async def test_get_graph_run_node_execution_results(
+        graph_id: str, graph_exec_id: str, user_id: str
+    ):
+        return await backend.server.routers.v1.get_graph_run_node_execution_results(
+            graph_id, graph_exec_id, user_id
+        )
+
+    @staticmethod
+    async def test_delete_graph(graph_id: str, user_id: str):
+        return await backend.server.routers.v1.delete_graph(graph_id, user_id)

     def set_test_dependency_overrides(self, overrides: dict):
-        self._test_dependency_overrides = overrides
-
-    def _apply_overrides_to_methods(self):
-        for attr_name in dir(self):
-            attr = getattr(self, attr_name)
-            if callable(attr) and hasattr(attr, "__annotations__"):
-                setattr(self, attr_name, self._override_method(attr))
-
-    # TODO: fix this with some proper refactoring of the server
-    def _override_method(self, method):
-        @wraps(method)
-        async def wrapper(*args, **kwargs):
-            sig = inspect.signature(method)
-            for param_name, param in sig.parameters.items():
-                if param.annotation is inspect.Parameter.empty:
-                    continue
-                if isinstance(param.annotation, Depends) or (  # type: ignore
-                    isinstance(param.annotation, type) and issubclass(param.annotation, Depends)  # type: ignore
-                ):
-                    dependency = param.annotation.dependency if isinstance(param.annotation, Depends) else param.annotation  # type: ignore
-                    if dependency in self._test_dependency_overrides:
-                        kwargs[param_name] = self._test_dependency_overrides[
-                            dependency
-                        ]()
-            return await method(*args, **kwargs)
-
-        return wrapper
-
-    @property
-    @thread_cached
-    def execution_manager_client(self) -> ExecutionManager:
-        return get_service_client(ExecutionManager)
-
-    @property
-    @thread_cached
-    def execution_scheduler_client(self) -> ExecutionScheduler:
-        return get_service_client(ExecutionScheduler)
-
-    @classmethod
-    def handle_internal_http_error(cls, request: Request, exc: Exception):
-        return JSONResponse(
-            content={
-                "message": f"{request.method} {request.url.path} failed",
-                "error": str(exc),
-            },
-            status_code=500,
-        )
-
-    @classmethod
-    async def get_or_create_user_route(cls, user_data: dict = Depends(auth_middleware)):
-        user = await get_or_create_user(user_data)
-        return user.model_dump()
-
-    @classmethod
-    def get_graph_blocks(cls) -> list[dict[Any, Any]]:
-        blocks = [cls() for cls in block.get_blocks().values()]
-        costs = get_block_costs()
-        return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks]
-
-    @classmethod
-    def execute_graph_block(
-        cls, block_id: str, data: BlockInput
-    ) -> CompletedBlockOutput:
-        obj = block.get_block(block_id)
-        if not obj:
-            raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
-
-        output = defaultdict(list)
-        for name, data in obj.execute(data):
-            output[name].append(data)
-        return output
-
-    @classmethod
-    async def get_graphs(
-        cls,
-        user_id: Annotated[str, Depends(get_user_id)],
-        with_runs: bool = False,
-    ) -> list[graph_db.GraphMeta]:
-        return await graph_db.get_graphs_meta(
-            include_executions=with_runs, filter_by="active", user_id=user_id
-        )
-
-    @classmethod
-    async def get_templates(
-        cls, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> list[graph_db.GraphMeta]:
-        return await graph_db.get_graphs_meta(filter_by="template", user_id=user_id)
-
-    @classmethod
-    async def get_graph(
-        cls,
-        graph_id: str,
-        user_id: Annotated[str, Depends(get_user_id)],
-        version: int | None = None,
-        hide_credentials: bool = False,
-    ) -> graph_db.Graph:
-        graph = await graph_db.get_graph(
-            graph_id, version, user_id=user_id, hide_credentials=hide_credentials
-        )
-        if not graph:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-        return graph
-
-    @classmethod
-    async def get_template(
-        cls, graph_id: str, version: int | None = None
-    ) -> graph_db.Graph:
-        graph = await graph_db.get_graph(graph_id, version, template=True)
-        if not graph:
-            raise HTTPException(
-                status_code=404, detail=f"Template #{graph_id} not found."
-            )
-        return graph
-
-    @classmethod
-    async def get_graph_all_versions(
-        cls, graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> list[graph_db.Graph]:
-        graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
-        if not graphs:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-        return graphs
-
-    @classmethod
-    async def create_new_graph(
-        cls, create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> graph_db.Graph:
-        return await cls.create_graph(create_graph, is_template=False, user_id=user_id)
-
-    @classmethod
-    async def create_new_template(
-        cls, create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> graph_db.Graph:
-        return await cls.create_graph(create_graph, is_template=True, user_id=user_id)
-
-    class DeleteGraphResponse(TypedDict):
-        version_counts: int
-
-    @classmethod
-    async def delete_graph(
-        cls, graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> DeleteGraphResponse:
-        return {
-            "version_counts": await graph_db.delete_graph(graph_id, user_id=user_id)
-        }
-
-    @classmethod
-    async def create_graph(
-        cls,
-        create_graph: CreateGraph,
-        is_template: bool,
-        # user_id doesn't have to be annotated like on other endpoints,
-        # because create_graph isn't used directly as an endpoint
-        user_id: str,
-    ) -> graph_db.Graph:
-        if create_graph.graph:
-            graph = create_graph.graph
-        elif create_graph.template_id:
-            # Create a new graph from a template
-            graph = await graph_db.get_graph(
-                create_graph.template_id,
-                create_graph.template_version,
-                template=True,
-                user_id=user_id,
-            )
-            if not graph:
-                raise HTTPException(
-                    400, detail=f"Template #{create_graph.template_id} not found"
-                )
-            graph.version = 1
-        else:
-            raise HTTPException(
-                status_code=400, detail="Either graph or template_id must be provided."
-            )
-
-        graph.is_template = is_template
-        graph.is_active = not is_template
-        graph.reassign_ids(reassign_graph_id=True)
-
-        return await graph_db.create_graph(graph, user_id=user_id)
-
-    @classmethod
-    async def update_graph(
-        cls,
-        graph_id: str,
-        graph: graph_db.Graph,
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> graph_db.Graph:
-        # Sanity check
-        if graph.id and graph.id != graph_id:
-            raise HTTPException(400, detail="Graph ID does not match ID in URI")
-
-        # Determine new version
-        existing_versions = await graph_db.get_graph_all_versions(
-            graph_id, user_id=user_id
-        )
-        if not existing_versions:
-            raise HTTPException(404, detail=f"Graph #{graph_id} not found")
-        latest_version_number = max(g.version for g in existing_versions)
-        graph.version = latest_version_number + 1
-
-        latest_version_graph = next(
-            v for v in existing_versions if v.version == latest_version_number
-        )
-        if latest_version_graph.is_template != graph.is_template:
-            raise HTTPException(
-                400, detail="Changing is_template on an existing graph is forbidden"
-            )
-        graph.is_active = not graph.is_template
-        graph.reassign_ids()
-
-        new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
-
-        if new_graph_version.is_active:
-            # Ensure new version is the only active version
-            await graph_db.set_graph_active_version(
-                graph_id=graph_id, version=new_graph_version.version, user_id=user_id
-            )
-
-        return new_graph_version
-
-    @classmethod
-    async def set_graph_active_version(
-        cls,
-        graph_id: str,
-        request_body: SetGraphActiveVersion,
-        user_id: Annotated[str, Depends(get_user_id)],
-    ):
-        new_active_version = request_body.active_graph_version
-        if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id):
-            raise HTTPException(
-                404, f"Graph #{graph_id} v{new_active_version} not found"
-            )
-        await graph_db.set_graph_active_version(
-            graph_id=graph_id,
-            version=request_body.active_graph_version,
-            user_id=user_id,
-        )
-
-    def execute_graph(
-        self,
-        graph_id: str,
-        node_input: dict[Any, Any],
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> dict[str, Any]:  # FIXME: add proper return type
-        try:
-            graph_exec = self.execution_manager_client.add_execution(
-                graph_id, node_input, user_id=user_id
-            )
-            return {"id": graph_exec["graph_exec_id"]}
-        except Exception as e:
-            msg = e.__str__().encode().decode("unicode_escape")
-            raise HTTPException(status_code=400, detail=msg)
-
-    async def stop_graph_run(
-        self, graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> list[execution_db.ExecutionResult]:
-        if not await execution_db.get_graph_execution(graph_exec_id, user_id):
-            raise HTTPException(
-                404, detail=f"Agent execution #{graph_exec_id} not found"
-            )
-
-        await asyncio.to_thread(
-            lambda: self.execution_manager_client.cancel_execution(graph_exec_id)
-        )
-
-        # Retrieve & return canceled graph execution in its final state
-        return await execution_db.get_execution_results(graph_exec_id)
-
-    @classmethod
-    async def get_graph_input_schema(
-        cls,
-        graph_id: str,
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> list[graph_db.InputSchemaItem]:
-        try:
-            graph = await graph_db.get_graph(graph_id, user_id=user_id)
-            return graph.get_input_schema() if graph else []
-        except Exception:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-
-    @classmethod
-    async def list_graph_runs(
-        cls,
-        graph_id: str,
-        user_id: Annotated[str, Depends(get_user_id)],
-        graph_version: int | None = None,
-    ) -> list[str]:
-        graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id)
-        if not graph:
-            rev = "" if graph_version is None else f" v{graph_version}"
-            raise HTTPException(
-                status_code=404, detail=f"Agent #{graph_id}{rev} not found."
-            )
-
-        return await execution_db.list_executions(graph_id, graph_version)
-
-    @classmethod
-    async def get_graph_run_status(
-        cls,
-        graph_id: str,
-        graph_exec_id: str,
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> execution_db.ExecutionStatus:
-        graph = await graph_db.get_graph(graph_id, user_id=user_id)
-        if not graph:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-
-        execution = await execution_db.get_graph_execution(graph_exec_id, user_id)
-        if not execution:
-            raise HTTPException(
-                status_code=404, detail=f"Execution #{graph_exec_id} not found."
-            )
-
-        return execution.executionStatus
-
-    @classmethod
-    async def get_graph_run_node_execution_results(
-        cls,
-        graph_id: str,
-        graph_exec_id: str,
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> list[execution_db.ExecutionResult]:
-        graph = await graph_db.get_graph(graph_id, user_id=user_id)
-        if not graph:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-
-        return await execution_db.get_execution_results(graph_exec_id)
-
-    async def create_schedule(
-        self,
-        graph_id: str,
-        cron: str,
-        input_data: dict[Any, Any],
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> dict[Any, Any]:
-        graph = await graph_db.get_graph(graph_id, user_id=user_id)
-        if not graph:
-            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
-
-        return {
-            "id": await asyncio.to_thread(
-                lambda: self.execution_scheduler_client.add_execution_schedule(
-                    graph_id=graph_id,
-                    graph_version=graph.version,
-                    cron=cron,
-                    input_data=input_data,
-                    user_id=user_id,
-                )
-            )
-        }
-
-    def update_schedule(
-        self,
-        schedule_id: str,
-        input_data: dict[Any, Any],
-        user_id: Annotated[str, Depends(get_user_id)],
-    ) -> dict[Any, Any]:
-        execution_scheduler = self.execution_scheduler_client
-        is_enabled = input_data.get("is_enabled", False)
-        execution_scheduler.update_schedule(schedule_id, is_enabled, user_id=user_id)
-        return {"id": schedule_id}
-
-    async def get_user_credits(
-        self, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> dict[str, int]:
-        return {"credits": await self._user_credit_model.get_or_refill_credit(user_id)}
-
-    def get_execution_schedules(
-        self, graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
-    ) -> dict[str, str]:
-        execution_scheduler = self.execution_scheduler_client
-        return execution_scheduler.get_execution_schedules(graph_id, user_id)
-
-    async def health(self):
-        return {"status": "healthy"}
-
-    @classmethod
-    def update_configuration(
-        cls,
-        updated_settings: Annotated[
-            Dict[str, Any],
-            Body(
-                examples=[
-                    {
-                        "config": {
-                            "num_graph_workers": 10,
-                            "num_node_workers": 10,
-                        }
-                    }
-                ]
-            ),
-        ],
-    ):
-        settings = Settings()
-        try:
-            updated_fields: dict[Any, Any] = {"config": [], "secrets": []}
-            for key, value in updated_settings.get("config", {}).items():
-                if hasattr(settings.config, key):
-                    setattr(settings.config, key, value)
-                    updated_fields["config"].append(key)
-            for key, value in updated_settings.get("secrets", {}).items():
-                if hasattr(settings.secrets, key):
-                    setattr(settings.secrets, key, value)
-                    updated_fields["secrets"].append(key)
-            settings.save()
-            return {
-                "message": "Settings updated successfully",
-                "updated_fields": updated_fields,
-            }
-        except Exception as e:
-            raise HTTPException(status_code=400, detail=str(e))
+        app.dependency_overrides.update(overrides)
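Summary of the rewrite above: the route-owning `AgentServer(AppService)` class is replaced by a module-level FastAPI `app` that mounts `v1_router`, plus a thin `AgentServer(AppProcess)` that wraps the app in CORS middleware and hands it to uvicorn; the per-endpoint logic moves to `backend/server/routers/v1.py` (next section), and the `test_*` static methods delegate to those router functions. A minimal standalone sketch of the serving pattern (host, port, and origin below are placeholders, not the project's settings):

```python
# Sketch: module-level app + manual CORSMiddleware wrap. Wrapping by hand
# (instead of app.add_middleware) keeps `app` importable without middleware
# for tests, while uvicorn serves the wrapped ASGI app.
import fastapi
import uvicorn
from starlette.middleware.cors import CORSMiddleware

app = fastapi.FastAPI(title="demo server")

@app.get("/health")
async def health():
    return {"status": "healthy"}

server_app = CORSMiddleware(
    app=app,
    allow_origins=["http://localhost:3000"],  # placeholder origin
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

if __name__ == "__main__":
    uvicorn.run(server_app, host="127.0.0.1", port=8000)  # placeholder host/port
```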
autogpt_platform/backend/backend/server/routers/v1.py (new file, 539 lines)
@@ -0,0 +1,539 @@
+import asyncio
+import logging
+from collections import defaultdict
+from typing import Annotated, Any, Dict
+
+from autogpt_libs.auth.middleware import auth_middleware
+from autogpt_libs.utils.cache import thread_cached
+from fastapi import APIRouter, Body, Depends, HTTPException
+from typing_extensions import TypedDict
+
+import backend.data.block
+import backend.server.integrations.router
+import backend.server.routers.analytics
+from backend.data import execution as execution_db
+from backend.data import graph as graph_db
+from backend.data.block import BlockInput, CompletedBlockOutput
+from backend.data.credit import get_block_costs, get_user_credit_model
+from backend.data.user import get_or_create_user
+from backend.executor import ExecutionManager, ExecutionScheduler
+from backend.server.model import CreateGraph, SetGraphActiveVersion
+from backend.server.utils import get_user_id
+from backend.util.service import get_service_client
+from backend.util.settings import Settings
+
+
+@thread_cached
+def execution_manager_client() -> ExecutionManager:
+    return get_service_client(ExecutionManager)
+
+
+@thread_cached
+def execution_scheduler_client() -> ExecutionScheduler:
+    return get_service_client(ExecutionScheduler)
+
+
+settings = Settings()
+logger = logging.getLogger(__name__)
+
+
+_user_credit_model = get_user_credit_model()
+
+# Define the API routes
+v1_router = APIRouter(prefix="/api")
+
+
+v1_router.dependencies.append(Depends(auth_middleware))
+
+
+v1_router.include_router(
+    backend.server.integrations.router.router,
+    prefix="/integrations",
+    tags=["integrations"],
+    dependencies=[Depends(auth_middleware)],
+)
+
+v1_router.include_router(
+    backend.server.routers.analytics.router,
+    prefix="/analytics",
+    tags=["analytics"],
+    dependencies=[Depends(auth_middleware)],
+)
+
+
+########################################################
+##################### Auth #############################
+########################################################
+
+
+@v1_router.post("/auth/user", tags=["auth"], dependencies=[Depends(auth_middleware)])
+async def get_or_create_user_route(user_data: dict = Depends(auth_middleware)):
+    user = await get_or_create_user(user_data)
+    return user.model_dump()
+
+
+########################################################
+##################### Blocks ###########################
+########################################################
+
+
+@v1_router.get(path="/blocks", tags=["blocks"], dependencies=[Depends(auth_middleware)])
+def get_graph_blocks() -> list[dict[Any, Any]]:
+    blocks = [block() for block in backend.data.block.get_blocks().values()]
+    costs = get_block_costs()
+    return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks]
+
+
+@v1_router.post(path="/blocks/{block_id}/execute", tags=["blocks"])
+def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput:
+    obj = backend.data.block.get_block(block_id)
+    if not obj:
+        raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
+
+    output = defaultdict(list)
+    for name, data in obj.execute(data):
+        output[name].append(data)
+    return output
+
+
+########################################################
+##################### Credits ##########################
+########################################################
+
+
+@v1_router.get(path="/credits", dependencies=[Depends(auth_middleware)])
+async def get_user_credits(
+    user_id: Annotated[str, Depends(get_user_id)]
+) -> dict[str, int]:
+    return {"credits": await _user_credit_model.get_or_refill_credit(user_id)}
+
+
+########################################################
+##################### Graphs ###########################
+########################################################
+
+
+class DeleteGraphResponse(TypedDict):
+    version_counts: int
+
+
+@v1_router.get(path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)])
+async def get_graphs(
+    user_id: Annotated[str, Depends(get_user_id)],
+    with_runs: bool = False,
+) -> list[graph_db.GraphMeta]:
+    return await graph_db.get_graphs_meta(
+        include_executions=with_runs, filter_by="active", user_id=user_id
+    )
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
+)
+@v1_router.get(
+    path="/graphs/{graph_id}/versions/{version}",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_graph(
+    graph_id: str,
+    user_id: Annotated[str, Depends(get_user_id)],
+    version: int | None = None,
+    hide_credentials: bool = False,
+) -> graph_db.Graph:
+    graph = await graph_db.get_graph(
+        graph_id, version, user_id=user_id, hide_credentials=hide_credentials
+    )
+    if not graph:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+    return graph
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}/versions",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+@v1_router.get(
+    path="/templates/{graph_id}/versions",
+    tags=["templates", "graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_graph_all_versions(
+    graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
+) -> list[graph_db.Graph]:
+    graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
+    if not graphs:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+    return graphs
+
+
+@v1_router.delete(
+    path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
+)
+async def delete_graph(
+    graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
+) -> DeleteGraphResponse:
+    return {"version_counts": await graph_db.delete_graph(graph_id, user_id=user_id)}
+
+
+@v1_router.put(
+    path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
+)
+@v1_router.put(
+    path="/templates/{graph_id}",
+    tags=["templates", "graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def update_graph(
+    graph_id: str,
+    graph: graph_db.Graph,
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> graph_db.Graph:
+    # Sanity check
+    if graph.id and graph.id != graph_id:
+        raise HTTPException(400, detail="Graph ID does not match ID in URI")
+
+    # Determine new version
+    existing_versions = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
+    if not existing_versions:
+        raise HTTPException(404, detail=f"Graph #{graph_id} not found")
+    latest_version_number = max(g.version for g in existing_versions)
+    graph.version = latest_version_number + 1
+
+    latest_version_graph = next(
+        v for v in existing_versions if v.version == latest_version_number
+    )
+    if latest_version_graph.is_template != graph.is_template:
+        raise HTTPException(
+            400, detail="Changing is_template on an existing graph is forbidden"
+        )
+    graph.is_active = not graph.is_template
+    graph.reassign_ids()
+
+    new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
+
+    if new_graph_version.is_active:
+        # Ensure new version is the only active version
+        await graph_db.set_graph_active_version(
+            graph_id=graph_id, version=new_graph_version.version, user_id=user_id
+        )
+
+    return new_graph_version
+
+
+@v1_router.post(
+    path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
+)
+async def create_new_graph(
+    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
+) -> graph_db.Graph:
+    return await do_create_graph(create_graph, is_template=False, user_id=user_id)
+
+
+@v1_router.put(
+    path="/graphs/{graph_id}/versions/active",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def set_graph_active_version(
+    graph_id: str,
+    request_body: SetGraphActiveVersion,
+    user_id: Annotated[str, Depends(get_user_id)],
+):
+    new_active_version = request_body.active_graph_version
+    if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id):
+        raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found")
+    await graph_db.set_graph_active_version(
+        graph_id=graph_id,
+        version=request_body.active_graph_version,
+        user_id=user_id,
+    )
+
+
+@v1_router.post(
+    path="/graphs/{graph_id}/execute",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def execute_graph(
+    graph_id: str,
+    node_input: dict[Any, Any],
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> dict[str, Any]:  # FIXME: add proper return type
+    try:
+        graph_exec = execution_manager_client().add_execution(
+            graph_id, node_input, user_id=user_id
+        )
+        return {"id": graph_exec["graph_exec_id"]}
+    except Exception as e:
+        msg = e.__str__().encode().decode("unicode_escape")
+        raise HTTPException(status_code=400, detail=msg)
+
+
+@v1_router.post(
+    path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def stop_graph_run(
+    graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
+) -> list[execution_db.ExecutionResult]:
+    if not await execution_db.get_graph_execution(graph_exec_id, user_id):
+        raise HTTPException(404, detail=f"Agent execution #{graph_exec_id} not found")
+
+    await asyncio.to_thread(
+        lambda: execution_manager_client().cancel_execution(graph_exec_id)
+    )
+
+    # Retrieve & return canceled graph execution in its final state
+    return await execution_db.get_execution_results(graph_exec_id)
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}/input_schema",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_graph_input_schema(
+    graph_id: str,
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> list[graph_db.InputSchemaItem]:
+    try:
+        graph = await graph_db.get_graph(graph_id, user_id=user_id)
+        return graph.get_input_schema() if graph else []
+    except Exception:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}/executions",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def list_graph_runs(
+    graph_id: str,
+    user_id: Annotated[str, Depends(get_user_id)],
+    graph_version: int | None = None,
+) -> list[str]:
+    graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id)
+    if not graph:
+        rev = "" if graph_version is None else f" v{graph_version}"
+        raise HTTPException(
+            status_code=404, detail=f"Agent #{graph_id}{rev} not found."
+        )
+
+    return await execution_db.list_executions(graph_id, graph_version)
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}/executions/{graph_exec_id}",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_graph_run_node_execution_results(
+    graph_id: str,
+    graph_exec_id: str,
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> list[execution_db.ExecutionResult]:
+    graph = await graph_db.get_graph(graph_id, user_id=user_id)
+    if not graph:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+
+    return await execution_db.get_execution_results(graph_exec_id)
+
+
+# NOTE: This is used for testing
+async def get_graph_run_status(
+    graph_id: str,
+    graph_exec_id: str,
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> execution_db.ExecutionStatus:
+    graph = await graph_db.get_graph(graph_id, user_id=user_id)
+    if not graph:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+
+    execution = await execution_db.get_graph_execution(graph_exec_id, user_id)
+    if not execution:
+        raise HTTPException(
+            status_code=404, detail=f"Execution #{graph_exec_id} not found."
+        )
+
+    return execution.executionStatus
+
+
+########################################################
+##################### Templates ########################
+########################################################
+
+
+@v1_router.get(
+    path="/templates",
+    tags=["graphs", "templates"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_templates(
+    user_id: Annotated[str, Depends(get_user_id)]
+) -> list[graph_db.GraphMeta]:
+    return await graph_db.get_graphs_meta(filter_by="template", user_id=user_id)
+
+
+@v1_router.get(
+    path="/templates/{graph_id}",
+    tags=["templates", "graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_template(graph_id: str, version: int | None = None) -> graph_db.Graph:
+    graph = await graph_db.get_graph(graph_id, version, template=True)
+    if not graph:
+        raise HTTPException(status_code=404, detail=f"Template #{graph_id} not found.")
+    return graph
+
+
+async def do_create_graph(
+    create_graph: CreateGraph,
+    is_template: bool,
+    # user_id doesn't have to be annotated like on other endpoints,
+    # because create_graph isn't used directly as an endpoint
+    user_id: str,
+) -> graph_db.Graph:
+    if create_graph.graph:
+        graph = create_graph.graph
+    elif create_graph.template_id:
+        # Create a new graph from a template
+        graph = await graph_db.get_graph(
+            create_graph.template_id,
+            create_graph.template_version,
+            template=True,
+            user_id=user_id,
+        )
+        if not graph:
+            raise HTTPException(
+                400, detail=f"Template #{create_graph.template_id} not found"
+            )
+        graph.version = 1
+    else:
+        raise HTTPException(
+            status_code=400, detail="Either graph or template_id must be provided."
+        )
+
+    graph.is_template = is_template
+    graph.is_active = not is_template
+    graph.reassign_ids(reassign_graph_id=True)
+
+    return await graph_db.create_graph(graph, user_id=user_id)
+
+
+@v1_router.post(
+    path="/templates",
+    tags=["templates", "graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def create_new_template(
+    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
+) -> graph_db.Graph:
+    return await do_create_graph(create_graph, is_template=True, user_id=user_id)
+
+
+########################################################
+##################### Schedules ########################
+########################################################
+
+
+@v1_router.post(
+    path="/graphs/{graph_id}/schedules",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def create_schedule(
+    graph_id: str,
+    cron: str,
+    input_data: dict[Any, Any],
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> dict[Any, Any]:
+    graph = await graph_db.get_graph(graph_id, user_id=user_id)
+    if not graph:
+        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
+
+    return {
+        "id": await asyncio.to_thread(
+            lambda: execution_scheduler_client().add_execution_schedule(
+                graph_id=graph_id,
+                graph_version=graph.version,
+                cron=cron,
+                input_data=input_data,
+                user_id=user_id,
+            )
+        )
+    }
+
+
+@v1_router.put(
+    path="/graphs/schedules/{schedule_id}",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def update_schedule(
+    schedule_id: str,
+    input_data: dict[Any, Any],
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> dict[Any, Any]:
+    is_enabled = input_data.get("is_enabled", False)
+    execution_scheduler_client().update_schedule(
+        schedule_id, is_enabled, user_id=user_id
+    )
+    return {"id": schedule_id}
+
+
+@v1_router.get(
+    path="/graphs/{graph_id}/schedules",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_execution_schedules(
+    graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
+) -> dict[str, str]:
+    return execution_scheduler_client().get_execution_schedules(graph_id, user_id)
+
+
+########################################################
+##################### Settings ########################
+########################################################
+
+
+@v1_router.post(
+    path="/settings", tags=["settings"], dependencies=[Depends(auth_middleware)]
+)
+async def update_configuration(
+    updated_settings: Annotated[
+        Dict[str, Any],
+        Body(
+            examples=[
+                {
+                    "config": {
+                        "num_graph_workers": 10,
+                        "num_node_workers": 10,
+                    }
+                }
+            ]
+        ),
+    ],
+):
+    settings = Settings()
+    try:
+        updated_fields: dict[Any, Any] = {"config": [], "secrets": []}
+        for key, value in updated_settings.get("config", {}).items():
+            if hasattr(settings.config, key):
+                setattr(settings.config, key, value)
+                updated_fields["config"].append(key)
+        for key, value in updated_settings.get("secrets", {}).items():
+            if hasattr(settings.secrets, key):
+                setattr(settings.secrets, key, value)
+                updated_fields["secrets"].append(key)
+        settings.save()
+        return {
+            "message": "Settings updated successfully",
+            "updated_fields": updated_fields,
+        }
+    except Exception as e:
+        raise HTTPException(status_code=400, detail=str(e))
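All of the above routes hang off `v1_router` (prefix `/api`) with `auth_middleware` as a router-level dependency, so every call needs a valid JWT. A hypothetical client-side sketch (base URL, port, and token are placeholders; only the paths come from the router above):

```python
# Sketch of driving the v1 API over HTTP with httpx (pip install httpx).
import httpx

BASE = "http://localhost:8000/api"            # placeholder host/port
HEADERS = {"Authorization": "Bearer <jwt>"}   # auth_middleware expects a JWT

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    blocks = client.get("/blocks").json()     # list available blocks with costs
    graphs = client.get("/graphs").json()     # list the user's active graphs
    if graphs:
        run = client.post(f"/graphs/{graphs[0]['id']}/execute", json={}).json()
        print(run["id"])                       # the new graph_exec_id
```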
@@ -5,7 +5,7 @@ from contextlib import asynccontextmanager
 import uvicorn
 from autogpt_libs.auth import parse_jwt_token
 from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
-from fastapi.middleware.cors import CORSMiddleware
+from starlette.middleware.cors import CORSMiddleware

 from backend.data import redis
 from backend.data.queue import AsyncRedisExecutionEventBus
@@ -31,15 +31,6 @@ docs_url = "/docs" if settings.config.app_env == AppEnvironment.LOCAL else None
 app = FastAPI(lifespan=lifespan, docs_url=docs_url)
 _connection_manager = None

-logger.info(f"CORS allow origins: {settings.config.backend_cors_allow_origins}")
-app.add_middleware(
-    CORSMiddleware,
-    allow_origins=settings.config.backend_cors_allow_origins,
-    allow_credentials=True,
-    allow_methods=["*"],
-    allow_headers=["*"],
-)
-

 def get_connection_manager():
     global _connection_manager
@@ -176,8 +167,16 @@ async def websocket_router(

 class WebsocketServer(AppProcess):
     def run(self):
+        logger.info(f"CORS allow origins: {settings.config.backend_cors_allow_origins}")
+        server_app = CORSMiddleware(
+            app=app,
+            allow_origins=settings.config.backend_cors_allow_origins,
+            allow_credentials=True,
+            allow_methods=["*"],
+            allow_headers=["*"],
+        )
         uvicorn.run(
-            app,
+            server_app,
             host=Config().websocket_server_host,
             port=Config().websocket_server_port,
         )
@@ -252,7 +252,7 @@ async def block_autogen_agent():
     test_user = await create_test_user()
     test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
     input_data = {"input": "Write me a block that writes a string into a file."}
-    response = server.agent_server.execute_graph(
+    response = await server.agent_server.test_execute_graph(
         test_graph.id, input_data, test_user.id
     )
     print(response)
@@ -156,7 +156,7 @@ async def reddit_marketing_agent():
     test_user = await create_test_user()
     test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
     input_data = {"subreddit": "AutoGPT"}
-    response = server.agent_server.execute_graph(
+    response = await server.agent_server.test_execute_graph(
         test_graph.id, input_data, test_user.id
     )
     print(response)
@@ -78,7 +78,7 @@ async def sample_agent():
     test_user = await create_test_user()
     test_graph = await create_graph(create_test_graph(), test_user.id)
     input_data = {"input_1": "Hello", "input_2": "World"}
-    response = server.agent_server.execute_graph(
+    response = await server.agent_server.test_execute_graph(
         test_graph.id, input_data, test_user.id
     )
     print(response)
@@ -1,3 +1,4 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
from backend.data import db
|
||||
@@ -6,9 +7,10 @@ from backend.data.execution import ExecutionStatus
|
||||
from backend.data.model import CREDENTIALS_FIELD_NAME
|
||||
from backend.data.user import create_default_user
|
||||
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
|
||||
from backend.server.rest_api import AgentServer, get_user_id
|
||||
from backend.server.rest_api import AgentServer
|
||||
from backend.server.utils import get_user_id
|
||||
|
||||
log = print
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SpinTestServer:
|
||||
@@ -57,17 +59,19 @@ async def wait_execution(
|
||||
timeout: int = 20,
|
||||
) -> list:
|
||||
async def is_execution_completed():
|
||||
status = await AgentServer().get_graph_run_status(
|
||||
status = await AgentServer().test_get_graph_run_status(
|
||||
graph_id, graph_exec_id, user_id
|
||||
)
|
||||
log.info(f"Execution status: {status}")
|
||||
if status == ExecutionStatus.FAILED:
|
||||
log.info("Execution failed")
|
||||
raise Exception("Execution failed")
|
||||
return status == ExecutionStatus.COMPLETED
|
||||
|
||||
# Wait for the executions to complete
|
||||
for i in range(timeout):
|
||||
if await is_execution_completed():
|
||||
return await AgentServer().get_graph_run_node_execution_results(
|
||||
return await AgentServer().test_get_graph_run_node_execution_results(
|
||||
graph_id, graph_exec_id, user_id
|
||||
)
|
||||
time.sleep(1)
|
||||
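The wait_execution hunk above polls the run status once per second until it completes, fails, or the timeout elapses. A generic sketch of that poll-until-done loop; the names are illustrative, and asyncio.sleep is used here so the wait does not block the event loop (the hunk itself keeps time.sleep):

```python
# Generic poll-until-done helper: `check` is any async completion predicate,
# `fetch` retrieves the final results once the predicate holds.
import asyncio
from typing import Awaitable, Callable, TypeVar

T = TypeVar("T")


async def wait_until(
    check: Callable[[], Awaitable[bool]],
    fetch: Callable[[], Awaitable[T]],
    timeout: int = 20,
) -> T:
    for _ in range(timeout):
        if await check():          # e.g. status == ExecutionStatus.COMPLETED
            return await fetch()   # e.g. node execution results
        await asyncio.sleep(1)     # non-blocking pause between polls
    raise TimeoutError(f"condition not met within {timeout}s")
```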
@@ -79,7 +83,7 @@ def execute_block_test(block: Block):
prefix = f"[Test-{block.name}]"

if not block.test_input or not block.test_output:
log(f"{prefix} No test data provided")
log.info(f"{prefix} No test data provided")
return
if not isinstance(block.test_input, list):
block.test_input = [block.test_input]
@@ -87,15 +91,15 @@ def execute_block_test(block: Block):
block.test_output = [block.test_output]

output_index = 0
log(f"{prefix} Executing {len(block.test_input)} tests...")
log.info(f"{prefix} Executing {len(block.test_input)} tests...")
prefix = " " * 4 + prefix

for mock_name, mock_obj in (block.test_mock or {}).items():
log(f"{prefix} mocking {mock_name}...")
log.info(f"{prefix} mocking {mock_name}...")
if hasattr(block, mock_name):
setattr(block, mock_name, mock_obj)
else:
log(f"{prefix} mock {mock_name} not found in block")
log.info(f"{prefix} mock {mock_name} not found in block")

extra_exec_kwargs = {}

@@ -107,7 +111,7 @@ def execute_block_test(block: Block):
extra_exec_kwargs[CREDENTIALS_FIELD_NAME] = block.test_credentials

for input_data in block.test_input:
log(f"{prefix} in: {input_data}")
log.info(f"{prefix} in: {input_data}")

for output_name, output_data in block.execute(input_data, **extra_exec_kwargs):
if output_index >= len(block.test_output):
@@ -125,7 +129,7 @@ def execute_block_test(block: Block):
is_matching = False

mark = "✅" if is_matching else "❌"
log(f"{prefix} {mark} comparing `{data}` vs `{expected_data}`")
log.info(f"{prefix} {mark} comparing `{data}` vs `{expected_data}`")
if not is_matching:
raise ValueError(
f"{prefix}: wrong output {data} vs {expected_data}"

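The log = print alias above is replaced with a module-level logger, so every call site changes from log(msg) to log.info(msg) and gains levels, timestamps, and filtering for free. A minimal sketch of the same migration; the function below is illustrative, not taken from the diff:

```python
# Before: log = print; log("message")
# After: a module logger with leveled calls.
import logging

log = logging.getLogger(__name__)


def run_checks() -> None:  # illustrative stand-in for execute_block_test
    log.info("executing tests...")  # was: log("executing tests...")
    log.warning("mock not found")   # was: log("mock not found")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    run_checks()
```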
50
autogpt_platform/backend/poetry.lock
generated
@@ -1988,8 +1988,8 @@ python-dateutil = ">=2.5.3"
tqdm = ">=4.64.1"
typing-extensions = ">=3.7.4"
urllib3 = [
{version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""},
{version = ">=1.26.5", markers = "python_version >= \"3.12\" and python_version < \"4.0\""},
{version = ">=1.26.0", markers = "python_version >= \"3.8\" and python_version < \"3.12\""},
]

[package.extras]
@@ -2285,8 +2285,8 @@ files = [
annotated-types = ">=0.6.0"
pydantic-core = "2.23.4"
typing-extensions = [
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
{version = ">=4.12.2", markers = "python_version >= \"3.13\""},
{version = ">=4.6.1", markers = "python_version < \"3.13\""},
]

[package.extras]
@@ -2458,13 +2458,13 @@ diagrams = ["jinja2", "railroad-diagrams"]

[[package]]
name = "pyright"
version = "1.1.386"
version = "1.1.387"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.386-py3-none-any.whl", hash = "sha256:7071ac495593b2258ccdbbf495f1a5c0e5f27951f6b429bed4e8b296eb5cd21d"},
{file = "pyright-1.1.386.tar.gz", hash = "sha256:8e9975e34948ba5f8e07792a9c9d2bdceb2c6c0b61742b068d2229ca2bc4a9d9"},
{file = "pyright-1.1.387-py3-none-any.whl", hash = "sha256:6a1f495a261a72e12ad17e20d1ae3df4511223c773b19407cfa006229b1b08a5"},
{file = "pyright-1.1.387.tar.gz", hash = "sha256:577de60224f7fe36505d5b181231e3a395d427b7873be0bbcaa962a29ea93a60"},
]

[package.dependencies]
@@ -2880,29 +2880,29 @@ pyasn1 = ">=0.1.3"

[[package]]
name = "ruff"
version = "0.7.1"
version = "0.7.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"},
{file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"},
{file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"},
{file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"},
{file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"},
{file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"},
{file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"},
{file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"},
{file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"},
{file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"},
{file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"},
{file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"},
{file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"},
{file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"},
]

[[package]]
@@ -3880,4 +3880,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "00069717c4818aa24b164e3c00a104d559c2fb16c531f60bccfcf5a69fb553c8"
content-hash = "bbad5245c6bd3cd1d93d9f047b65e40beda081d83c2eed59eed508c41a9b5ff1"

@@ -52,8 +52,8 @@ poethepoet = "^0.29.0"
httpx = "^0.27.0"
pytest-watcher = "^0.4.2"
requests = "^2.32.3"
ruff = "^0.7.1"
pyright = "^1.1.386"
ruff = "^0.7.2"
pyright = "^1.1.387"
isort = "^5.13.2"
black = "^24.10.0"
aiohappyeyeballs = "^2.4.3"

@@ -1,7 +1,20 @@
import logging

import pytest

from backend.util.test import SpinTestServer

# NOTE: You can run tests with the --log-cli-level=INFO flag to see the logs
# Set up logging
logger = logging.getLogger(__name__)

# Create console handler with formatting
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
ch.setFormatter(formatter)
logger.addHandler(ch)


@pytest.fixture(scope="session")
async def server():
@@ -12,7 +25,7 @@ async def server():
@pytest.fixture(scope="session", autouse=True)
async def graph_cleanup(server):
created_graph_ids = []
original_create_graph = server.agent_server.create_graph
original_create_graph = server.agent_server.test_create_graph

async def create_graph_wrapper(*args, **kwargs):
created_graph = await original_create_graph(*args, **kwargs)
@@ -22,13 +35,14 @@ async def graph_cleanup(server):
return created_graph

try:
server.agent_server.create_graph = create_graph_wrapper
server.agent_server.test_create_graph = create_graph_wrapper
yield  # This runs the test function
finally:
server.agent_server.create_graph = original_create_graph
server.agent_server.test_create_graph = original_create_graph

# Delete the created graphs and assert they were deleted
for graph_id, user_id in created_graph_ids:
resp = await server.agent_server.delete_graph(graph_id, user_id)
num_deleted = resp["version_counts"]
assert num_deleted > 0, f"Graph {graph_id} was not deleted."
if user_id:
resp = await server.agent_server.test_delete_graph(graph_id, user_id)
num_deleted = resp["version_counts"]
assert num_deleted > 0, f"Graph {graph_id} was not deleted."
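The graph_cleanup fixture above swaps test_create_graph for a recording wrapper, yields to the test, then restores the original method and deletes everything that was recorded. A condensed sketch of that wrap-and-restore pattern; the server object and method names stand in for the real fixtures, and pytest-asyncio is assumed, as in the project's own tests:

```python
import pytest


@pytest.fixture(autouse=True)
async def graph_cleanup(server):
    created: list[str] = []
    original = server.agent_server.test_create_graph

    async def wrapper(*args, **kwargs):
        graph = await original(*args, **kwargs)
        created.append(graph.id)  # record for teardown
        return graph

    try:
        server.agent_server.test_create_graph = wrapper  # patch in the recorder
        yield  # the test body runs here
    finally:
        server.agent_server.test_create_graph = original  # always restore
        for graph_id in created:
            await server.agent_server.test_delete_graph(graph_id)
```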

@@ -31,7 +31,7 @@ async def test_graph_creation(server: SpinTestServer):
description="Test graph",
nodes=[
Node(id="node_1", block_id=value_block),
Node(id="node_2", block_id=input_block),
Node(id="node_2", block_id=input_block, input_default={"name": "input"}),
Node(id="node_3", block_id=value_block),
],
links=[
@@ -47,15 +47,15 @@ async def test_graph_creation(server: SpinTestServer):
create_graph = CreateGraph(graph=graph)

try:
await server.agent_server.create_graph(create_graph, False, DEFAULT_USER_ID)
await server.agent_server.test_create_graph(create_graph, DEFAULT_USER_ID)
assert False, "Should not be able to connect nodes from different subgraphs"
except ValueError as e:
assert "different subgraph" in str(e)

# Change node_1 <-> node_3 link to node_1 <-> node_2 (input for subgraph_1)
graph.links[0].sink_id = "node_2"
created_graph = await server.agent_server.create_graph(
create_graph, False, DEFAULT_USER_ID
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)

assert UUID(created_graph.id)
@@ -102,8 +102,8 @@ async def test_get_input_schema(server: SpinTestServer):
)

create_graph = CreateGraph(graph=graph)
created_graph = await server.agent_server.create_graph(
create_graph, False, DEFAULT_USER_ID
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)

input_schema = created_graph.get_input_schema()
@@ -138,8 +138,8 @@ async def test_get_input_schema_none_required(server: SpinTestServer):
)

create_graph = CreateGraph(graph=graph)
created_graph = await server.agent_server.create_graph(
create_graph, False, DEFAULT_USER_ID
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)

input_schema = created_graph.get_input_schema()
@@ -180,8 +180,8 @@ async def test_get_input_schema_with_linked_blocks(server: SpinTestServer):
)

create_graph = CreateGraph(graph=graph)
created_graph = await server.agent_server.create_graph(
create_graph, False, DEFAULT_USER_ID
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)

input_schema = created_graph.get_input_schema()

@@ -1,3 +1,5 @@
import logging

import pytest
from prisma.models import User

@@ -9,9 +11,12 @@ from backend.server.rest_api import AgentServer
from backend.usecases.sample import create_test_graph, create_test_user
from backend.util.test import SpinTestServer, wait_execution

logger = logging.getLogger(__name__)


async def create_graph(s: SpinTestServer, g: graph.Graph, u: User) -> graph.Graph:
return await s.agent_server.create_graph(CreateGraph(graph=g), False, u.id)
logger.info(f"Creating graph for user {u.id}")
return await s.agent_server.test_create_graph(CreateGraph(graph=g), u.id)


async def execute_graph(
@@ -21,12 +26,20 @@ async def execute_graph(
input_data: dict,
num_execs: int = 4,
) -> str:
logger.info(f"Executing graph {test_graph.id} for user {test_user.id}")
logger.info(f"Input data: {input_data}")

# --- Test adding new executions --- #
response = agent_server.execute_graph(test_graph.id, input_data, test_user.id)
response = await agent_server.test_execute_graph(
test_graph.id, input_data, test_user.id
)
graph_exec_id = response["id"]
logger.info(f"Created execution with ID: {graph_exec_id}")

# Execution queue should be empty
logger.info("Waiting for execution to complete...")
result = await wait_execution(test_user.id, test_graph.id, graph_exec_id)
logger.info(f"Execution completed with {len(result)} results")
assert result and len(result) == num_execs
return graph_exec_id

@@ -37,7 +50,8 @@ async def assert_sample_graph_executions(
test_user: User,
graph_exec_id: str,
):
executions = await agent_server.get_graph_run_node_execution_results(
logger.info(f"Checking execution results for graph {test_graph.id}")
executions = await agent_server.test_get_graph_run_node_execution_results(
test_graph.id,
graph_exec_id,
test_user.id,
@@ -57,6 +71,7 @@ async def assert_sample_graph_executions(

# Executing StoreValueBlock
exec = executions[0]
logger.info(f"Checking first StoreValueBlock execution: {exec}")
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert (
@@ -69,6 +84,7 @@ async def assert_sample_graph_executions(

# Executing StoreValueBlock
exec = executions[1]
logger.info(f"Checking second StoreValueBlock execution: {exec}")
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert (
@@ -81,6 +97,7 @@ async def assert_sample_graph_executions(

# Executing FillTextTemplateBlock
exec = executions[2]
logger.info(f"Checking FillTextTemplateBlock execution: {exec}")
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert exec.output_data == {"output": ["Hello, World!!!"]}
@@ -95,6 +112,7 @@ async def assert_sample_graph_executions(

# Executing PrintToConsoleBlock
exec = executions[3]
logger.info(f"Checking PrintToConsoleBlock execution: {exec}")
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert exec.output_data == {"status": ["printed"]}
@@ -104,6 +122,7 @@ async def assert_sample_graph_executions(

@pytest.mark.asyncio(scope="session")
async def test_agent_execution(server: SpinTestServer):
logger.info("Starting test_agent_execution")
test_user = await create_test_user()
test_graph = await create_graph(server, create_test_graph(), test_user)
data = {"input_1": "Hello", "input_2": "World"}
@@ -117,6 +136,7 @@ async def test_agent_execution(server: SpinTestServer):
await assert_sample_graph_executions(
server.agent_server, test_graph, test_user, graph_exec_id
)
logger.info("Completed test_agent_execution")


@pytest.mark.asyncio(scope="session")
@@ -132,6 +152,7 @@ async def test_input_pin_always_waited(server: SpinTestServer):
// key
StoreValueBlock2
"""
logger.info("Starting test_input_pin_always_waited")
nodes = [
graph.Node(
block_id=StoreValueBlock().id,
@@ -172,7 +193,8 @@ async def test_input_pin_always_waited(server: SpinTestServer):
server.agent_server, test_graph, test_user, {}, 3
)

executions = await server.agent_server.get_graph_run_node_execution_results(
logger.info("Checking execution results")
executions = await server.agent_server.test_get_graph_run_node_execution_results(
test_graph.id, graph_exec_id, test_user.id
)
assert len(executions) == 3
@@ -180,6 +202,7 @@ async def test_input_pin_always_waited(server: SpinTestServer):
# Hence executing extraction of "key" from {"key1": "value1", "key2": "value2"}
assert executions[2].status == execution.ExecutionStatus.COMPLETED
assert executions[2].output_data == {"output": ["value2"]}
logger.info("Completed test_input_pin_always_waited")


@pytest.mark.asyncio(scope="session")
@@ -197,6 +220,7 @@ async def test_static_input_link_on_graph(server: SpinTestServer):
And later, another output is produced on input pin `b`, which is a static link,
this input will complete the input of those three incomplete executions.
"""
logger.info("Starting test_static_input_link_on_graph")
nodes = [
graph.Node(block_id=StoreValueBlock().id, input_default={"input": 4}),  # a
graph.Node(block_id=StoreValueBlock().id, input_default={"input": 4}),  # a
@@ -252,11 +276,14 @@ async def test_static_input_link_on_graph(server: SpinTestServer):
graph_exec_id = await execute_graph(
server.agent_server, test_graph, test_user, {}, 8
)
executions = await server.agent_server.get_graph_run_node_execution_results(
logger.info("Checking execution results")
executions = await server.agent_server.test_get_graph_run_node_execution_results(
test_graph.id, graph_exec_id, test_user.id
)
assert len(executions) == 8
# The last 3 executions will be a+b=4+5=9
for exec_data in executions[-3:]:
for i, exec_data in enumerate(executions[-3:]):
logger.info(f"Checking execution {i+1} of last 3: {exec_data}")
assert exec_data.status == execution.ExecutionStatus.COMPLETED
assert exec_data.output_data == {"result": [9]}
logger.info("Completed test_static_input_link_on_graph")

@@ -12,7 +12,7 @@ from backend.util.test import SpinTestServer
async def test_agent_schedule(server: SpinTestServer):
await db.connect()
test_user = await create_test_user()
test_graph = await server.agent_server.create_graph(
test_graph = await server.agent_server.test_create_graph(
create_graph=CreateGraph(graph=create_test_graph()),
is_template=False,
user_id=test_user.id,

@@ -69,6 +69,7 @@ services:
- DATABASEMANAGER_HOST=executor
- FRONTEND_BASE_URL=http://localhost:3000
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
- ENCRYPTION_KEY=some-long-random-string-encryption-key
ports:
- "8006:8006"
- "8003:8003" # execution scheduler
@@ -104,7 +105,8 @@ services:
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- AGENTSERVER_HOST=rest_server
- DATABASEMANAGER_HOST=0.0.0.0
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
ports:
- "8002:8000"
networks:
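The ENCRYPTION_KEY placeholder added above has the shape of a Fernet key (32 url-safe base64-encoded bytes). Assuming that is what the backend expects, each deployment can generate its own key instead of reusing the sample value:

```python
# Assumption: the backend's ENCRYPTION_KEY is a Fernet key.
from cryptography.fernet import Fernet

print(Fernet.generate_key().decode())  # paste the output into ENCRYPTION_KEY
```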

@@ -1,19 +0,0 @@
<!DOCTYPE html>
<html lang="en">

<head>
<title>Example Files</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>

<body>
<h1>Example Files</h1>
<ul>
<li><a href="example1.txt">Example 1</a></li>
<li><a href="example2.txt">Example 2</a></li>
<li><a href="example3.txt">Example 3</a></li>
</ul>
</body>

</html>
@@ -1,7 +0,0 @@
NEXT_PUBLIC_AUTH_CALLBACK_URL=https://dev-server.agpt.co/auth/callback
NEXT_PUBLIC_AGPT_SERVER_URL=https://dev-server.agpt.co/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=wss://dev-ws-server.agpt.co/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=https://dev-market.agpt.co/api/v1/market
NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImFkZmp0ZXh0a3VpbHd1aHpkanBmIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MzAyNTE3MDIsImV4cCI6MjA0NTgyNzcwMn0.IuQNXsHEKJNxtS9nyFeqO0BGMYN8sPiObQhuJLSK9xk"
NEXT_PUBLIC_SUPABASE_URL="https://adfjtextkuilwuhzdjpf.supabase.co"
NEXT_PUBLIC_BEHAVE_AS=CLOUD
@@ -1,8 +0,0 @@
APP_ENV=production
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=https://market.agpt.co/api/v1/market
NEXT_PUBLIC_AGPT_SERVER_URL=https://backend.agpt.co/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=wss://ws-backend.agpt.co/ws
NEXT_PUBLIC_AUTH_CALLBACK_URL=https://backend.agpt.co/auth/callback
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImJnd3B3ZHN4YmxyeWloaW51dGJ4Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3MzAyODYzMDUsImV4cCI6MjA0NTg2MjMwNX0.ISa2IofTdQIJmmX5JwKGGNajqjsD8bjaGBzK90SubE0
NEXT_PUBLIC_SUPABASE_URL=https://bgwpwdsxblryihinutbx.supabase.co
NEXT_PUBLIC_BEHAVE_AS=CLOUD
@@ -30,7 +30,7 @@
"@radix-ui/react-context-menu": "^2.2.1",
"@radix-ui/react-dialog": "^1.1.2",
"@radix-ui/react-dropdown-menu": "^2.1.2",
"@radix-ui/react-icons": "^1.3.0",
"@radix-ui/react-icons": "^1.3.1",
"@radix-ui/react-label": "^2.1.0",
"@radix-ui/react-popover": "^1.1.2",
"@radix-ui/react-scroll-area": "^1.2.0",
@@ -42,19 +42,19 @@
"@radix-ui/react-tooltip": "^1.1.3",
"@sentry/nextjs": "^8",
"@supabase/ssr": "^0.5.1",
"@supabase/supabase-js": "^2.46.0",
"@supabase/supabase-js": "^2.46.1",
"@tanstack/react-table": "^8.20.5",
"@xyflow/react": "^12.3.2",
"@xyflow/react": "^12.3.4",
"ajv": "^8.17.1",
"class-variance-authority": "^0.7.0",
"clsx": "^2.1.1",
"cmdk": "1.0.0",
"date-fns": "^3.6.0",
"cmdk": "1.0.4",
"date-fns": "^4.1.0",
"dotenv": "^16.4.5",
"lucide-react": "^0.454.0",
"moment": "^2.30.1",
"next": "^14.2.13",
"next-themes": "^0.3.0",
"next-themes": "^0.4.3",
"react": "^18",
"react-day-picker": "^9.2.1",
"react-dom": "^18",
@@ -63,36 +63,36 @@
"react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-shepherd": "^6.1.4",
"recharts": "^2.13.0",
"recharts": "^2.13.3",
"tailwind-merge": "^2.5.4",
"tailwindcss-animate": "^1.0.7",
"uuid": "^10.0.0",
"uuid": "^11.0.2",
"zod": "^3.23.8"
},
"devDependencies": {
"@chromatic-com/storybook": "^1.9.0",
"@chromatic-com/storybook": "^3.2.2",
"@playwright/test": "^1.48.2",
"@storybook/addon-essentials": "^8.3.6",
"@storybook/addon-interactions": "^8.3.6",
"@storybook/addon-links": "^8.3.6",
"@storybook/addon-onboarding": "^8.3.6",
"@storybook/blocks": "^8.3.6",
"@storybook/nextjs": "^8.3.6",
"@storybook/addon-essentials": "^8.4.1",
"@storybook/addon-interactions": "^8.4.1",
"@storybook/addon-links": "^8.4.1",
"@storybook/addon-onboarding": "^8.4.1",
"@storybook/blocks": "^8.4.1",
"@storybook/nextjs": "^8.4.1",
"@storybook/react": "^8.3.5",
"@storybook/test": "^8.3.5",
"@storybook/test-runner": "^0.19.1",
"@types/node": "^22.8.4",
"@types/node": "^22.8.7",
"@types/react": "^18",
"@types/react-dom": "^18",
"@types/react-modal": "^3.16.3",
"concurrently": "^9.0.1",
"concurrently": "^9.1.0",
"eslint": "^8",
"eslint-config-next": "15.0.2",
"eslint-plugin-storybook": "^0.10.1",
"eslint-plugin-storybook": "^0.11.0",
"postcss": "^8",
"prettier": "^3.3.3",
"prettier-plugin-tailwindcss": "^0.6.8",
"storybook": "^8.3.6",
"storybook": "^8.4.1",
"tailwindcss": "^3.4.14",
"typescript": "^5"
},

@@ -352,14 +352,9 @@ const NodeKeyValueInput: FC<{
const defaultEntries = new Map(
Object.entries(entries ?? schema.default ?? {}),
);
const prefix = `${selfKey}_#_`;
connections
.filter((c) => c.targetHandle.startsWith(prefix))
.map((c) => c.targetHandle.slice(prefix.length))
.forEach((k) => !defaultEntries.has(k) && defaultEntries.set(k, ""));

return Array.from(defaultEntries, ([key, value]) => ({ key, value }));
}, [connections, entries, schema.default, selfKey]);
}, [entries, schema.default]);

const [keyValuePairs, setKeyValuePairs] = useState<
{ key: string; value: string | number | null }[]

@@ -25,7 +25,7 @@ const ToastViewport = React.forwardRef<
ToastViewport.displayName = ToastPrimitives.Viewport.displayName;

const toastVariants = cva(
"group pointer-events-auto relative flex w-full items-center justify-between space-x-2 overflow-hidden rounded-md border border-neutral-200 p-4 pr-6 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full dark:border-neutral-800",
"whitespace-pre-line group pointer-events-auto relative flex w-full items-center justify-between space-x-2 overflow-hidden rounded-md border border-neutral-200 p-4 pr-6 shadow-lg transition-all data-[swipe=cancel]:translate-x-0 data-[swipe=end]:translate-x-[var(--radix-toast-swipe-end-x)] data-[swipe=move]:translate-x-[var(--radix-toast-swipe-move-x)] data-[swipe=move]:transition-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[swipe=end]:animate-out data-[state=closed]:fade-out-80 data-[state=closed]:slide-out-to-right-full data-[state=open]:slide-in-from-top-full data-[state=open]:sm:slide-in-from-bottom-full dark:border-neutral-800",
{
variants: {
variant: {

@@ -17,6 +17,7 @@ import { Connection, MarkerType } from "@xyflow/react";
import Ajv from "ajv";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useRouter, useSearchParams, usePathname } from "next/navigation";
import { useToast } from "@/components/ui/use-toast";

const ajv = new Ajv({ strict: false, allErrors: true });

@@ -25,6 +26,7 @@ export default function useAgentGraph(
template?: boolean,
passDataToBeads?: boolean,
) {
const { toast } = useToast();
const [router, searchParams, pathname] = [
useRouter(),
useSearchParams(),
@@ -588,7 +590,8 @@ export default function useAgentGraph(
[availableNodes],
);

const saveAgent = useCallback(
const _saveAgent = (
() =>
async (asTemplate: boolean = false) => {
//FIXME frontend ids should be resolved better (e.g. returned from the server)
// currently this relies on block_id and position
@@ -742,6 +745,23 @@ export default function useAgentGraph(
},
}));
});
}
)();

const saveAgent = useCallback(
async (asTemplate: boolean = false) => {
try {
await _saveAgent(asTemplate);
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error);
console.error("Error saving agent", error);
toast({
variant: "destructive",
title: "Error saving agent",
description: errorMessage,
});
}
},
[
api,

@@ -28,7 +28,7 @@ export default class BaseAutoGPTServerAPI {

constructor(
baseUrl: string = process.env.NEXT_PUBLIC_AGPT_SERVER_URL ||
"http://localhost:8006/api",
"http://localhost:8006/api/v1",
wsUrl: string = process.env.NEXT_PUBLIC_AGPT_WS_SERVER_URL ||
"ws://localhost:8001/ws",
supabaseClient: SupabaseClient | null = null,
@@ -287,7 +287,7 @@ export default class BaseAutoGPTServerAPI {
errorDetail = response.statusText;
}

throw new Error(`HTTP error ${response.status}! ${errorDetail}`);
throw new Error(errorDetail);
}

// Handle responses with no content (like DELETE requests)

File diff suppressed because it is too large
@@ -116,7 +116,7 @@ secrets:
REPLICATE_API_KEY: "AgCPCgcYb+tE8/k45Y7/my4G2jWPCuEMTXJIn1fG1q4x4ZJPFzb43m7Uqtwn23NkmUZ5Qvh8BXedrtHwxapuYzw/P6c7xK66xfLKRbTWtYk4twS3sxPb+pt1FXY4USEjj5yeIFduybkqhE2QfnGoyrbDZ4Bz3AIgnrRD0Ee5m9u5yNZTPmJqZZqg4MRdUBCxCWIJBkW6DCE9nCPAQeNPD6e+lZ1j+/LocT2HX/ZlcsPXCxbn6wkxoyLqA0vUKSG9azS6oLvn0/3Cb01ozG8S2OEAqWIImFqhKGMfGqL6jSZWln43cmQdMTzSzM+HiprA9JHjZqGK7wOV9HZvSR+58IXoJGPBEIM7jIg5KqPjpZY4KFZBp5OiiRRYu+nCbuD+KsY/7ogjPHjbi1rpR8TrtXdzWNmwsTTmjytB/KEqeUpLWOEPgArFPyrNTS5/nmREH7r9jNEhfIRdTlS3IVGGXp/VN8napbNND1GDyzowvF771neq7/zTmfCRCJ4J0gwPNKM5rzOuRW+caEf2qOFBKIldVa/J0PFg5bAgpGL6jhpXHj0Q/+j1s3FA/D2ZebZTPIpKe40It3sWsS/0Qjhbj1GMbL4yUWvGpBSUTk7kZazkaVND1LbhjC+4AolTQdIU4MgW0bkmDn5ZI4a9/dHyLS3lFeYNSQ6vnbz+Id7zB3O0D6/FH8nfAUGL8V+J3eFKMp+G67z+XYH6WGABaNicz41zFBDF5hRax+k/ZziPPlFY0kDc3cAB6pLc"
IDEOGRAM_API_KEY: "AgBTMZopEC5qALJvordVcdbUnwOEhLR5v6+7bTWg8GF2GSeNn2jKqM+o+wr2FjZJ0vtfyS+MjpOrr1xXrhdWLHg6HXyonjg67jS4BwxFrX60MbnkuoeYDX10O30JloAaMcq3a7OzLa/2r9RjqICO0fappaYHuysTcj6XD+wHsgQgZBSaqaomP7WR0YEIEiLVJaVFfQJaj1lqffIXxSQHE/oqafKqV3CeBPcD2jgwvErBMOBldTNDY2ehSmS54ebwLcXcLwf6JoQjXeit2Zry2ffnW5eNIIwYVl5GO4JT6rE2lB4B6jDs0z/y7toXdN5pGuVoYGDia8OKd7MuW3IlAWkhYLcoKla62/YzcC1JffMrqV++lWt4WFcuXWvL6UUNshwCdX/LQ9/tMt81cCQuin4mrVPxrEYALkin6HBaOWChCbhVS31lPWmFLM+vE+dvogY9Zp3PubxP5E3gsRLKn2LeAEXiGjBlf2H3IQ6/YI/WonsT1pKTHMkegHmvNz79XCFAJLnUIXlffznyTj+A+5VAVVl+i90mc2UtQLyEVuMbwK3lcoKB0IzNCs8OY/9DkuoROIikPOg79ZVmo+d1DzSl3y2tlR64Sw/pd5TL58+/MVbLFI35mKo1HKoxN+kUIDehFfZ8Sn8+8MkUsuAT7xfDvgYZpIxC2138XqTFJn2PhvdbQGB4l9dGIo+fjrddWuoEhAzZXJ8EmxTUaY+SRPKyWqKgf+w+qjdzRKqsE/zIo5Xj3LrnJh3VEC01waR1TdM+qIRTlpAO8m5oYHO8QB32GnabUbjPRDVIq2Vkp9qKpwgWtrBHTQ=="
GROQ_API_KEY: "AgCxjMuIUxeJYxvGFMMRV3vOeTs7eJbA+oHsjCi6dCVL4/rk9GBK+NSDuG87ltIk5iwXsx75arP8YLHjifC0Jn5lfzxy4KLmQSkgfXBw+X05nO1zNoSWCadVhZDKAu1YL/FTD874f2UsSRLMV9BkqW2YNjN+WXab+dDTkicNcNTV6q/HKn6vMLeQd5PDv0Nbtup7B3C6rfSnIiKC0YH0Bqvc5BPEmw+6PVK9nPpdJiL6IxPX84LsJ/T1vMRNxlkFgEAN+3CZyx5q3ycMEQjOsBQGWny1HDeGuYeeYxRbg8PWMT+mx5uDampxqf66ztT1+PxpBx/+pgReSvWot5zGI1uN20Fiqav/n8VYF9x0wvYfFaSSGWeosjJ195MZDjq0cjWsy14lhGPbA839aZm+E6EQ3hmFzQULM4iR1sGu1OP87GhE6HbJ3Os4Gmsbo3XqWEy2puWSqcnZbkZvely5MGU6HDQ0L09EDMTG1PmmI3VmRiPUmJGxe+GtHaCmtJINsBrrbvjIki3TLSGwbIITKh5OEy5Y9My9H7WWZnzX8Dppkpqti1eUxh/GdtIZvLBve2N9nw65NRcx0sNp2knsO7Yk2+J2I9IrEwE8eC/AeBt7ii1ukCf1CSPNDj42GtqWgrQ15X85i7XCudmi2n4QfVL9/ZGOIjt95m8Ge+/o1Q4ltZBJ0wNgQPHClr12s/2H02BkmoJfUBlVQsGSc7yDz+cwPHLGb+tpjdPwENkm8yizTgK9/SJYa8gNLxFlJRa5MqZyMuye2A1mBQ=="
REVID_API_KEY: "AgBPAmDtdzfHMbudluOeUZS7RCixfJXaI6vBvEUPQhhtpbNW9sUfDv6waKzBdjgWxIf2EqI4QI7QUVqHxA7fMChSjZjt9Np9z6+nHZhKWTqCCKyVR4Lka8tnkU5M62e+x+T7QoHy6mKsB7FEyQ8/FPxUM/Ddp5ZPTj6sbLn3y2uv1SPxInbd6boXeLEwQQcN/Inrww/lzNzJPec4jlHNwPHugjhZJnlWotvyfhU33Fdt5IEusdqt6CM0vS5N7lkR8KTNAg56VLD89yXVRR2VOtkJWzaQJ4lNSztBgUFNGaYtl7SRNVYnMpT1jhyTcAeO/fAGP4O/8haTlAZbfSsOLub5Af9CIA5vtNpU4zoY0Q2MOtKOJ5OTtbbJxmlWex3zc2wpIwdTLCRyBHxiPphdSBXQPW12s9NX6GVR7WDc3Lcvhi0P4uo+y01kyL64JGRigjvBzCYCqhNGWMtJ+YOy4pwcE89Qaz2/EvHgh37P9O7TQhN3Vo8BGRV0+DlNe6uv3OBitgayub5M34nh+qnNHypErkm29SovnqY1fEFhdOib2nmnE2fEXylZMtMPonBVYtVX9iOJLERQMNOzwDxaKzwPdpNu4GhMNVG6joCDjDqKPp9BfnKDJsn0GgUt3oDl5kRWdYL75HLDa3LDWGj0UiAF1YeQE2SHJjlrIoAVJWDdjwTsFJ1x3sMZTt9gB8KiUIDK/hgt7to/kaIyBUTrQIUv/2hHNYI4KH5/nNFZu3TPJXNrxfU="
REVID_API_KEY: "AgB2+w7euCOnPTTag2oZWnTDiuHZJGjpsIVzIhlXXL4HURNEesANasrjyaQo1Lw7Zi2QDEKF+0XnZduOs0q5kRED3CUD9QN2aDClblOXC1g5zfmsrRp2wpISdsvOmPuMbzmVgIGA8fxSASm8+tlGh8T+QvlmXVvfD6ZWAbLXEW/4Inrz8o9RyEIjoS/g6NynVYg4CaY4xdG6KZIUuR3VkiI8irH8mXKf5O4LKNC9qKTMF8/tYyyv1gS8GAiU9JcuWKUYpqHoNah293d+vTVXVwo6o0GoQ+huxh+90cS3D1FPhiWHABeLYS292brnshJY3AuoCAPZxyQK69EXZgjUFCc+m5n2juF27P38QcCxtgr66kMXwg582OikJ77nDg6QHafIGnqSkUG3O184UOAzUe/iOjjDNlpxQaZoRQXP37zPIZcingWUx4zvZVjbsQcKSqdiI5u6K8kDw7F8wfCJI+3Y6YH9k7cWE+6ZmK1U7hEh3YKmqI5I0e4WolljRv8PlsWLZdgN5u5M7NqtYjqEhdd2fJ9fJYedXeATRefUHtthhvLeSbg3wS0f90pldIej+da1ZPpnToBN/o2YUA03pbQmty0Ce4EVBbwEN9sSNJGC1hicquURQP0baLBTsXzVZRkTT8OckxRkCN8UysTdkMtIf10AdQ0QVe6lAxR0DjnBL/ysgpSF1ZyKBWm6l7rBEsIIWXjBJpeGo1arVQVtepk8q1nLLWVpykJIfRnPRTCE41/Z9c0="
ANTHROPIC_API_KEY: "AgB+40jZoawJ6HaeyyLZvDp2ByF4EPy2Ce806D/lekwJVmxpYXgkQdLJyav6bt9c1g5eDShJqezx1T+jGV+ApQbhSwmO403nJdYO825Fd3XVJ5K0xfFNt8DOTC9r2egWFvJZL40s/Y24kpr2Mmsqp+Sk9DxMNdYG/Z4PnT8PLWgb3yIYGGPVjlWlHL82gn4/B1bVTk14/cGXX9eSr0ktKKmS2OGLMYUwLT3oYGy4bRq9bH91XwjjdW6vMluNBGYibpi2f4h3nYORaQn42OhzTqON3XUdvNmsw1ZH0raMAJq47SU0lC6Ar9MzwbtUWY3tF6BlTmyx3gPavjQCnXg5cRxmY8JklrynDoyN+SBludzaWDzdAjr4vGPpkOo56RBY+28tnNjmxgyttGIYfFa8DEsrrJJgZZxSUvRqwJc0TWIKRFP1aIHkQ5DclUilFtNfPMezxwcFqrsYTEvtDjsls/E8uTNUN99cVQ2x0PxDsLKr9xVKAKkzzOBEvKEAJy0t5RtRV9A6kc0b16YbjIkFphip4e7HJTWKRvavknw+MXjGXXQrz9+xp8LrjRcgCyZp3BqSo+gsX8KQJSnhiFfKvgt7RdVaUVUA+sn5fIQGPWA5IkbI4gS12BRFDw59+Doc5FbCGUip4jDL8I2bPuNKQZLmSMx93Nu/60WBRhKXYz+GNzLzvhk0IpEI3d2GeWvC61p/f5eKnsLKNLDrc3k8rFHiUmXXhB+oQXMRSUFqq4hen32VVPWRhzT8nefww5Tud21CBg5+87x6WHOnB/A4vw+VuKD3fdeo9tn4HlD3w2funOVu9yv+NaP+MTeHus2PBeab/OtKLH09ezxnhmA="
OPENAI_API_KEY: "AgAjPjhje52qw5YSXjRAwoXU0WyDEIAHnz2CjFtSjkpbsvvXFPlPMlOd/y7/dvABoyZHB9Ukxjna6opqV/hK/vHR9ncp9i7cDYX3Rekj+mkA6arMdqdJ0eikAGqWYPieu8RcBn6pHFGmoC8ZZPgk6Eh3Wyi4OCaPfH/O1bTq/RBQU5VDFvYfaeDZmYIu6SkD88pI0lT12Dklk1apsHlS+g3/rpQwDXgemE/pdmcNnt1zS6Ifu4isN7yg1pg1Thja+UiQnEkIiZkvmD39LO8HrwOFt8guJctRZ5gnVxPmSEdJLN089/fj5VXxTO1kTprbh1KeG9RKYS5LEPNYpgcl9/o884qMc/r0/+Cy7gL5R1THrEPHurVg8JfssCq8k0SaEtCElQ2081Scc/0p/k4URpXrsxUKZ8XUTIvYS0y2mEJPAAqaHAkwthY8sizhOwqWWnt8dGbCPwfQ11TiNSMikKIim9Bwm4tKM9aEolROSkivbGqFQQYSkensyp2mTqx7iFYlGBa7Z7PFRBZgPzD2FojWc6o5tLui5Xgi67ukO5WeaBhO6eMd2CuIlXqu+5x3+ixIytp9Jpke4mZKwbjYai4j3iELbzEwbGkjsnDyWNYn+1KnPOogd6i5+YPn09FbQO2Qvg2t3yUP/ePeX+fdRYk7AnS/o6nllqj9GLas48JFUlEx+KSO4qwrflRqPUmfmD3wPDTYR2q6yJzdatLYzdRQxEctFhgvco63uhW1YH+1ei1YuxutYPkIOqUwbgfIC3XiW7Tr3R8Gd3TimJLQM1etR6dwrEaEm0jTCIKUoZ+65OIAeVtcXIwWtwRjjUOtR8k9B2UdFoJOgtfIFzlxwYj4xUJrzRkCLdFD1W362n2+O5n7QdXDjYXn9KmxVUEph2vloeS8IGrBjM/l743A3trFD4CZ9g=="
DATABASE_URL: "AgAfP8iiQGaA68dGVHQuHiKXldqhWungOlLEy6kg6nkKIY6LAwwUJbF59SrsbJ6Lvaq+40XiPSEV6ZjC1JpDyNQyPYzS6hUO9Ev82ViQ2H4Ba62jehBjXufVhabGurHe+F/WsyrXAEY496yX8I3/voy92bR+r0z66jRKHPwI+OXP2CyvdfIz6ziGwInkdfGdP0WRopvmSzbr/atUc1MGVBGuCvNguYWQ3WUwiF38EPObsoYpgV8fuD4trrFE2imHRs23AXMK/ntkqAjZwVWXfZNwaFECT9y1ue04rjDhuoFsL6lhvsK9Xf07mrTzBjdjJl0eCCTxsm0kZTTCwsPSq6H+6w8bjH33M1qeEnORwMuthFy4p0r3e+qlWbhHHwR6ku9wiwzCavDTd27EEfMKkD3zG7NrnbYA4zelHfG2q3/1/PZCeAOsa5jo0EuMTJr4p1Z6deKS4wevzOqJ/FcU1/5T24aKdxhVMnVrF9HKCLKHD+lJLJE8XgdZLFeded234nQfc9MGoBCD6FJvgfJCrjQh8QCSpm1aBKgu795Esff3ZqXJFiq7YCTQTOv/P6RXR5XA/LEqq1m5pcyBDzKixILE1SEbNdeXbYNhe7SbobKpQ9gq3f2ssCRNZGMgJtde6TQFx7J76IE4Eu9oqZefNQxHvh9lH2l0bROWy5NYKHfAejXnVGxIEVnoyRvyFB/HMOc="

54
autogpt_platform/market/poetry.lock
generated
@@ -829,13 +829,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

[[package]]
name = "pyright"
version = "1.1.386"
version = "1.1.387"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{file = "pyright-1.1.386-py3-none-any.whl", hash = "sha256:7071ac495593b2258ccdbbf495f1a5c0e5f27951f6b429bed4e8b296eb5cd21d"},
{file = "pyright-1.1.386.tar.gz", hash = "sha256:8e9975e34948ba5f8e07792a9c9d2bdceb2c6c0b61742b068d2229ca2bc4a9d9"},
{file = "pyright-1.1.387-py3-none-any.whl", hash = "sha256:6a1f495a261a72e12ad17e20d1ae3df4511223c773b19407cfa006229b1b08a5"},
{file = "pyright-1.1.387.tar.gz", hash = "sha256:577de60224f7fe36505d5b181231e3a395d427b7873be0bbcaa962a29ea93a60"},
]

[package.dependencies]
@@ -1058,40 +1058,40 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]

[[package]]
name = "ruff"
version = "0.7.1"
version = "0.7.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"},
{file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"},
{file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"},
{file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"},
{file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"},
{file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"},
{file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"},
{file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"},
{file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"},
{file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"},
{file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"},
{file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"},
{file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"},
{file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"},
{file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"},
{file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"},
]

[[package]]
name = "sentry-sdk"
version = "2.17.0"
version = "2.18.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
files = [
{file = "sentry_sdk-2.17.0-py2.py3-none-any.whl", hash = "sha256:625955884b862cc58748920f9e21efdfb8e0d4f98cca4ab0d3918576d5b606ad"},
{file = "sentry_sdk-2.17.0.tar.gz", hash = "sha256:dd0a05352b78ffeacced73a94e86f38b32e2eae15fff5f30ca5abb568a72eacf"},
{file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"},
{file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"},
]

[package.dependencies]
@@ -1120,9 +1120,11 @@ httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
langchain = ["langchain (>=0.0.210)"]
launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"]
litestar = ["litestar (>=2.0.0)"]
loguru = ["loguru (>=0.5)"]
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
openfeature = ["openfeature-sdk (>=0.7.1)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
opentelemetry-experimental = ["opentelemetry-distro"]
pure-eval = ["asttokens", "executing", "pure-eval"]
@@ -1296,4 +1298,4 @@ watchmedo = ["PyYAML (>=3.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "f94a7651b3ddc7819bafdc75384cb8ad34f7a5415e652ecb5bd82d632d26e693"
content-hash = "361b9204c1acb3460c52f93879693fc2a0b506b4806a0ec884f2b9307b9c2e2d"

@@ -14,7 +14,7 @@ prisma = "^0.15.0"
python-dotenv = "^1.0.1"
uvicorn = "^0.32.0"
fastapi = "^0.115.4"
sentry-sdk = { extras = ["fastapi"], version = "^2.17.0" }
sentry-sdk = { extras = ["fastapi"], version = "^2.18.0" }
fuzzywuzzy = "^0.18.0"
python-levenshtein = "^0.26.1"
# autogpt-platform-backend = { path = "../backend", develop = true }
@@ -28,8 +28,8 @@ pytest-asyncio = "^0.24.0"

pytest-watcher = "^0.4.3"
requests = "^2.32.3"
ruff = "^0.7.1"
pyright = "^1.1.386"
ruff = "^0.7.2"
pyright = "^1.1.387"
isort = "^5.13.2"
black = "^24.10.0"

@@ -29,7 +29,7 @@ def populate_database():

import market.model

templates = pathlib.Path(__file__).parent.parent / "backend" / "graph_templates"
templates = pathlib.Path(__file__).parent.parent / "graph_templates"

all_files = glob.glob(str(templates / "*.json"))

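The populate_database fix above resolves the template directory relative to the script file itself, dropping a stale "backend" path segment, so the lookup works no matter which directory the script is launched from. A small sketch of the __file__-relative pattern; the directory layout is assumed:

```python
# __file__-relative lookup: independent of the current working directory.
import glob
import pathlib

templates = pathlib.Path(__file__).parent.parent / "graph_templates"
all_files = glob.glob(str(templates / "*.json"))
print(all_files)
```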