feat(rnd): Implement agent block execution logic for AutoGPT Server (#7194)

### Background

This PR implements the main logic of the block execution engine for AutoGPT-Server.
An integration test is added to test the behavior.

*What you can do now with this PR*:
You can manually create a graph by using the existing blocks as nodes (or writing your own), then execute the graph with an input.
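For example, a rough sketch based on the integration test added in this PR (assuming a configured database; the `Demo` graph and `main` wrapper here are illustrative, and error handling is omitted):

```python
import asyncio

from autogpt_server.data import block, db, graph
from autogpt_server.data.execution import ExecutionQueue
from autogpt_server.server import server


async def main():
    await db.connect()
    await block.initialize_blocks()

    # ParrotBlock's "output" feeds PrintingBlock's "text" input.
    nodes = [
        graph.Node(block_id=block.ParrotBlock.id),
        graph.Node(block_id=block.PrintingBlock.id),
    ]
    nodes[0].connect(nodes[1], "output", "text")
    demo = await graph.create_graph(
        graph.Graph(name="Demo", description="Echo, then print", nodes=nodes)
    )

    # Enqueue one execution per starting node of the graph.
    queue = ExecutionQueue()
    await server.AgentServer(queue).execute_agent(demo.id, {"input": "Hello!"})


asyncio.run(main())
```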

*What you can't do yet*:
Listen to graph execution results/updates (you can follow the results in the `AgentNodeExecution` table, though).

### Changes 🏗️
* Split `data.py` (model file) into three modules:
    * `execution`: a model for node execution.
    * `graph`: a model for graph structure.
    * `block`: a model for agent block/component.
* Implemented the executor's main logic
* Simplified the DB structure:
    * Removed `AgentBlockInputOutput` in favor of `inputSchema` & `outputSchema` columns using a serialized JSON/dict structure (see the schema sketch below).
    * Removed `id` on `AgentBlock` in favor of using the name (the class name of the block) as its identifier.
    * Added a `constantInput` column to `AgentNode` for hard-coded input/block configuration, hence removing `executionStateData` from `AgentNodeExecution`.
    * Renamed `AgentNodeLink` input/output to source/sink to avoid confusion.
* Changed multithreading to multiprocessing, to allow the use of multiple `prisma` asynchronous clients.
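The serialized schema columns hold plain object-typed `jsonschema` documents. As an illustration, this mirrors what `BlockSchema` (in `block.py` below) generates for a block with a single string input:

```python
# str(BlockSchema({"input": "string"})) is what ends up in the
# AgentBlock.inputSchema column:
input_schema = {
    "type": "object",
    "properties": {
        "input": {"type": "string"},
    },
    "required": ["input"],  # by default, every property is required
}
```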
Author: Zamil Majdy
Date: 2024-06-10 19:30:34 +07:00 (committed by GitHub)
Commit: e688cc31f0 (parent: b803e42189)
12 changed files with 1082 additions and 137 deletions


@@ -1,7 +1,7 @@
from multiprocessing import freeze_support
from multiprocessing.spawn import freeze_support as freeze_support_spawn
from autogpt_server.data import ExecutionQueue
from autogpt_server.data.execution import ExecutionQueue
from autogpt_server.executor import start_executor_manager
from autogpt_server.server import start_server


@@ -1,36 +0,0 @@
import uuid
from multiprocessing import Queue
class Execution:
"""Data model for an execution of an Agent"""
def __init__(self, execution_id: str, data: str):
self.execution_id = execution_id
self.data = data
# TODO: This shared class makes the API & executor coupled to one machine.
# Replace this with a persistent & remote-hosted queue.
# One very likely candidate would be persisted Redis (Redis Queue).
# It will also open the possibility of using it for other purposes, like
# caching, execution engine brokering (like Celery), user session management, etc.
class ExecutionQueue:
"""
Queue for managing the execution of agents.
This will be shared between different processes
"""
def __init__(self):
self.queue: Queue[Execution] = Queue()
def add(self, data: str) -> str:
execution_id = uuid.uuid4()
self.queue.put(Execution(str(execution_id), data))
return str(execution_id)
def get(self) -> Execution | None:
return self.queue.get()
def empty(self) -> bool:
return self.queue.empty()


@@ -0,0 +1,236 @@
import json
import jsonschema
from abc import ABC, abstractmethod
from prisma.models import AgentBlock
from pydantic import BaseModel
from typing import Any, ClassVar
BlockData = dict[str, Any]
class BlockSchema(BaseModel):
"""
A schema for the block input and output data.
The dictionary structure is an object-typed `jsonschema`.
The top-level properties are the block input/output names.
You can initialize this class by providing a dictionary of properties.
The key is the property name, and the value is either a string naming the type
or a jsonschema dictionary.
You can also provide additional keyword arguments for extra schema fields,
like `name` or `required` (by default, all properties are required).
Example:
input_schema = BlockSchema({
"system_prompt": "string",
"user_prompt": "string",
"max_tokens": "integer",
"user_info": {
"type": "object",
"properties": {
"name": {"type": "string"},
"age": {"type": "integer"},
},
"required": ["name"],
},
}, required=["system_prompt", "user_prompt"])
output_schema = BlockSchema({
"on_complete": "string",
"on_failures": "string",
})
"""
jsonschema: dict[str, Any]
def __init__(
self,
properties: dict[str, str | dict],
required: list[str] | None = None,
**kwargs: Any
):
schema = {
"type": "object",
"properties": {
key: {"type": value} if isinstance(value, str) else value
for key, value in properties.items()
},
"required": required or list(properties.keys()),
**kwargs,
}
super().__init__(jsonschema=schema)
def __str__(self) -> str:
return json.dumps(self.jsonschema)
def validate_data(self, data: BlockData) -> str | None:
"""
Validate the data against the schema.
Returns the validation error message if the data does not match the schema.
"""
try:
jsonschema.validate(data, self.jsonschema)
return None
except jsonschema.ValidationError as e:
return str(e)
def validate_field(self, field_name: str, data: BlockData) -> str | None:
"""
Validate the data against a specific property (one of the input/output name).
Returns the validation error message if the data does not match the schema.
"""
property_schema = self.jsonschema["properties"].get(field_name)
if not property_schema:
return f"Invalid property name {field_name}"
try:
jsonschema.validate(data, property_schema)
return None
except jsonschema.ValidationError as e:
return str(e)
class Block(ABC, BaseModel):
@classmethod
@property
@abstractmethod
def id(cls) -> str:
"""
The unique identifier for the block; this value will be persisted in the DB,
so it should be unique and constant across application runs.
Use the UUID format for the ID.
"""
pass
@classmethod
@property
@abstractmethod
def input_schema(cls) -> BlockSchema:
"""
The schema for the block input data.
The top-level properties are the possible input names expected by the block.
"""
pass
@classmethod
@property
@abstractmethod
def output_schema(cls) -> BlockSchema:
"""
The schema for the block output.
The top-level properties are the possible output names produced by the block.
"""
pass
@abstractmethod
async def run(self, input_data: BlockData) -> tuple[str, Any]:
"""
Run the block with the given input data.
Args:
input_data: The input data with the structure of input_schema.
Returns:
A tuple of (output name, output data), matching the type in output_schema.
"""
pass
@classmethod
@property
def name(cls):
return cls.__name__
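# Validates input against input_schema, runs the block, then validates the
# returned (output_name, output_data) pair against output_schema.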
async def execute(self, input_data: BlockData) -> tuple[str, Any]:
if error := self.input_schema.validate_data(input_data):
raise ValueError(
f"Unable to execute block with invalid input data: {error}"
)
output_name, output_data = await self.run(input_data)
if error := self.output_schema.validate_field(output_name, output_data):
raise ValueError(
f"Unable to execute block with invalid output data: {error}"
)
return output_name, output_data
# ===================== Inline-Block Implementations ===================== #
class ParrotBlock(Block):
id: ClassVar[str] = "1ff065e9-88e8-4358-9d82-8dc91f622ba9" # type: ignore
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"input": "string",
})
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"output": "string",
})
async def run(self, input_data: BlockData) -> tuple[str, Any]:
return "output", input_data["input"]
class TextCombinerBlock(Block):
id: ClassVar[str] = "db7d8f02-2f44-4c55-ab7a-eae0941f0c30" # type: ignore
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"text1": "string",
"text2": "string",
"format": "string",
})
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"combined_text": "string",
})
async def run(self, input_data: BlockData) -> tuple[str, Any]:
return "combined_text", input_data["format"].format(
text1=input_data["text1"],
text2=input_data["text2"],
)
class PrintingBlock(Block):
id: ClassVar[str] = "f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c" # type: ignore
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"text": "string",
})
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
"status": "string",
})
async def run(self, input_data: BlockData) -> tuple[str, Any]:
print(input_data["text"])
return "status", "printed"
# ======================= Block Helper Functions ======================= #
AVAILABLE_BLOCKS: dict[str, Block] = {}
async def initialize_blocks() -> None:
global AVAILABLE_BLOCKS
AVAILABLE_BLOCKS = {block.id: block() for block in Block.__subclasses__()}
for block in AVAILABLE_BLOCKS.values():
existing_block = await AgentBlock.prisma().find_unique(
where={"id": block.id}
)
if existing_block:
continue
await AgentBlock.prisma().create(
data={
"id": block.id,
"name": block.name,
"inputSchema": str(block.input_schema),
"outputSchema": str(block.output_schema),
}
)
async def get_block(block_id: str) -> Block:
if not AVAILABLE_BLOCKS:
await initialize_blocks()
return AVAILABLE_BLOCKS[block_id]


@@ -0,0 +1,27 @@
import asyncio
from uuid import uuid4
from prisma import Prisma
from pydantic import BaseModel
prisma = Prisma(auto_register=True)
def connect_sync():
asyncio.get_event_loop().run_until_complete(connect())
async def connect():
await prisma.connect()
async def disconnect():
await prisma.disconnect()
class BaseDbModel(BaseModel):
id: str = ""
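# Auto-generate a UUID when no explicit id is provided.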
def __init__(self, id: str = "", **data):
data["id"] = id or str(uuid4())
super().__init__(**data)


@@ -0,0 +1,98 @@
import json
from datetime import datetime
from enum import Enum
from multiprocessing import Queue
from prisma.models import AgentNodeExecution
from typing import Any
from autogpt_server.data.db import BaseDbModel
class Execution(BaseDbModel):
"""Data model for an execution of an Agent"""
run_id: str
node_id: str
data: dict[str, Any]
class ExecutionStatus(str, Enum):
QUEUED = "QUEUED"
RUNNING = "RUNNING"
COMPLETED = "COMPLETED"
FAILED = "FAILED"
# TODO: This shared class makes the API & executor coupled to one machine.
# Replace this with a persistent & remote-hosted queue.
# One very likely candidate would be persisted Redis (Redis Queue).
# It will also open the possibility of using it for other purposes, like
# caching, execution engine brokering (like Celery), user session management, etc.
class ExecutionQueue:
"""
Queue for managing the execution of agents.
This will be shared between different processes.
"""
def __init__(self):
self.queue: Queue[Execution] = Queue()
def add(self, execution: Execution) -> Execution:
self.queue.put(execution)
return execution
def get(self) -> Execution:
return self.queue.get()
def empty(self) -> bool:
return self.queue.empty()
async def add_execution(execution: Execution, queue: ExecutionQueue) -> Execution:
await AgentNodeExecution.prisma().create(
data={
"id": execution.id,
"executionId": execution.run_id,
"agentNodeId": execution.node_id,
"executionStatus": ExecutionStatus.QUEUED,
"inputData": json.dumps(execution.data),
"creationTime": datetime.now(),
}
)
return queue.add(execution)
async def start_execution(exec_id: str) -> None:
await AgentNodeExecution.prisma().update(
where={"id": exec_id},
data={
"executionStatus": ExecutionStatus.RUNNING,
"startTime": datetime.now(),
},
)
async def complete_execution(exec_id: str, output: tuple[str, Any]) -> None:
output_name, output_data = output
await AgentNodeExecution.prisma().update(
where={"id": exec_id},
data={
"executionStatus": ExecutionStatus.COMPLETED,
"outputName": output_name,
"outputData": json.dumps(output_data),
"endTime": datetime.now(),
},
)
async def fail_execution(exec_id: str, error: Exception) -> None:
await AgentNodeExecution.prisma().update(
where={"id": exec_id},
data={
"executionStatus": ExecutionStatus.FAILED,
"outputName": "error",
"outputData": str(error),
"endTime": datetime.now(),
},
)


@@ -0,0 +1,173 @@
import asyncio
import json
import uuid
from prisma.models import AgentGraph, AgentNode, AgentNodeLink, AgentNodeExecution
from typing import Any
from autogpt_server.data.db import BaseDbModel
from autogpt_server.data.block import get_block
class Node(BaseDbModel):
block_id: str
input_default: dict[str, Any] = {} # dict[input_name, default_value]
input_nodes: dict[str, str] = {} # dict[input_name, node_id]
# TODO: Make it `dict[str, list[str]]`; an output can be connected to multiple blocks.
# Another option is to use an edge list, but that would complicate the rest of the code.
output_nodes: dict[str, str] = {} # dict[output_name, node_id]
@staticmethod
def from_db(node: AgentNode):
if not node.AgentBlock:
raise ValueError(f"Invalid node {node.id}, invalid AgentBlock.")
return Node(
id=node.id,
block_id=node.AgentBlock.id,
input_default=json.loads(node.constantInput),
input_nodes={v.sinkName: v.agentNodeSourceId for v in node.Input or []},
output_nodes={v.sourceName: v.agentNodeSinkId for v in node.Output or []},
)
def connect(self, node: "Node", source_name: str, sink_name: str):
self.output_nodes[source_name] = node.id
node.input_nodes[sink_name] = self.id
@property
async def block(self):
return await get_block(self.block_id)
class Graph(BaseDbModel):
name: str
description: str
nodes: list[Node]
@property
def starting_nodes(self) -> list[Node]:
return [node for node in self.nodes if not node.input_nodes]
@staticmethod
def from_db(graph: AgentGraph):
return Graph(
id=graph.id,
name=graph.name or "",
description=graph.description or "",
nodes=[Node.from_db(node) for node in graph.AgentNodes or []],
)
EXECUTION_NODE_INCLUDE = {
"Input": True,
"Output": True,
"AgentBlock": True,
}
async def get_node(node_id: str) -> Node | None:
node = await AgentNode.prisma().find_unique_or_raise(
where={"id": node_id},
include=EXECUTION_NODE_INCLUDE, # type: ignore
)
return Node.from_db(node) if node else None
async def get_graph(graph_id: str) -> Graph | None:
graph = await AgentGraph.prisma().find_unique(
where={"id": graph_id},
include={"AgentNodes": {"include": EXECUTION_NODE_INCLUDE}}, # type: ignore
)
return Graph.from_db(graph) if graph else None
async def get_node_input(node: Node, exec_id: str) -> dict[str, Any]:
"""
Get execution node input data from the previous node execution result.
Args:
node: The execution node.
exec_id: The execution ID.
Returns:
A dictionary of input data, where the key is the input name and the value is the input data.
"""
query = AgentNodeExecution.prisma().find_many(
where={ # type: ignore
"executionId": exec_id,
"agentNodeId": {"in": list(node.input_nodes.values())},
"executionStatus": "COMPLETED",
},
distinct=["agentNodeId"], # type: ignore
order={"creationTime": "desc"},
)
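# `distinct` on agentNodeId with descending creationTime ordering keeps only
# the latest completed execution per upstream node.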
latest_executions: dict[str, AgentNodeExecution] = {
execution.agentNodeId: execution for execution in await query
}
return {
**node.input_default,
**{
name: json.loads(latest_executions[node_id].outputData or "{}")
for name, node_id in node.input_nodes.items()
if node_id in latest_executions and latest_executions[node_id].outputData
},
}
async def create_graph(graph: Graph) -> Graph:
await AgentGraph.prisma().create(
data={
"id": graph.id,
"name": graph.name,
"description": graph.description,
}
)
# TODO: replace this with bulk creation using create_many
await asyncio.gather(
*[
AgentNode.prisma().create(
{
"id": node.id,
"agentBlockId": node.block_id,
"agentGraphId": graph.id,
"constantInput": json.dumps(node.input_default),
}
)
for node in graph.nodes
]
)
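# Each connection is recorded on both sides (the source's output_nodes and
# the sink's input_nodes); the key-set union below covers edges recorded on
# either side.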
edge_source_names = {
(source_node.id, sink_node_id): output_name
for source_node in graph.nodes
for output_name, sink_node_id in source_node.output_nodes.items()
}
edge_sink_names = {
(source_node_id, sink_node.id): input_name
for sink_node in graph.nodes
for input_name, source_node_id in sink_node.input_nodes.items()
}
# TODO: replace this with bulk creation using create_many
await asyncio.gather(
*[
AgentNodeLink.prisma().create(
{
"id": str(uuid.uuid4()),
"sourceName": edge_source_names.get((input_node, output_node), ""),
"sinkName": edge_sink_names.get((input_node, output_node), ""),
"agentNodeSourceId": input_node,
"agentNodeSinkId": output_node,
}
)
for input_node, output_node in (
edge_source_names.keys() | edge_sink_names.keys()
)
]
)
if created_graph := await get_graph(graph.id):
return created_graph
raise ValueError(f"Failed to create graph {graph.id}.")


@@ -1,36 +1,132 @@
import asyncio
import logging
import time
from concurrent.futures import ThreadPoolExecutor
from typing import Optional
from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Process
from autogpt_server.data import Execution, ExecutionQueue
from autogpt_server.data import block, db, graph
from autogpt_server.data.execution import (
Execution,
ExecutionQueue,
add_execution,
complete_execution,
start_execution,
fail_execution,
)
logger = logging.getLogger(__name__)
# TODO: Replace this by an actual Agent Execution.
def execute_node(id: str, data: str) -> None:
logger.warning(f"Executor processing started, execution_id: {id}, data: {data}")
for i in range(5):
def get_log_prefix(run_id: str, exec_id: str, block_name: str = "-"):
return f"[Execution graph-{run_id}|node-{exec_id}|{block_name}]"
async def execute_node(data: Execution) -> Execution | None:
"""
Execute a node in the graph. This will trigger a block execution on a node,
persist the execution result, and return the subsequent node to be executed.
Args:
data: The execution data for executing the current node.
Returns:
The subsequent node to be enqueued, or None if there is no subsequent node.
"""
run_id = data.run_id
exec_id = data.id
exec_data = data.data
node_id = data.node_id
node = await graph.get_node(node_id)
if not node:
logger.error(f"Node {node_id} not found.")
return None
node_block = await block.get_block(node.block_id)
if not node_block:
logger.error(f"Block {node.block_id} not found.")
return None
# Execute the node
prefix = get_log_prefix(run_id, exec_id, node_block.name)
logger.warning(f"{prefix} execute with input:\n{exec_data}")
await start_execution(exec_id)
try:
output_name, output_data = await node_block.execute(exec_data)
logger.warning(f"{prefix} executed with output: `{output_name}`:{output_data}")
await complete_execution(exec_id, (output_name, output_data))
except Exception as e:
logger.exception(f"{prefix} failed with error: %s", e)
await fail_execution(exec_id, e)
raise e
# Try to enqueue next eligible nodes
if output_name not in node.output_nodes:
logger.error(f"{prefix} output name `{output_name}` has no subsequent node.")
return None
next_node_id = node.output_nodes[output_name]
next_node = await graph.get_node(next_node_id)
if not next_node:
logger.error(f"{prefix} Error, next node {next_node_id} not found.")
return None
next_node_input = await graph.get_node_input(next_node, run_id)
next_node_block = await next_node.block
if error := next_node_block.input_schema.validate_data(next_node_input):
logger.warning(
f"Executor processing step {i}, execution_id: {id}, data: {data}"
)
time.sleep(1)
logger.warning(f"Executor processing completed, execution_id: {id}, data: {data}")
f"{prefix} Skipped {next_node_id}-{next_node_block.name}, {error}")
return None
logger.warning(f"{prefix} Enqueue next node {next_node_id}-{next_node_block.name}")
return Execution(
run_id=run_id, node_id=next_node_id, data=next_node_input
)
def execute_node_sync(data: Execution) -> Execution | None:
"""
A synchronous version of `execute_node`, to be used in the ProcessPoolExecutor.
"""
prefix = get_log_prefix(data.run_id, data.id)
try:
logger.warning(f"{prefix} Start execution")
loop = asyncio.get_event_loop()
return loop.run_until_complete(execute_node(data))
except Exception as e:
logger.error(f"{prefix} Error: {e}")
def start_executor(pool_size: int, queue: ExecutionQueue) -> None:
with ThreadPoolExecutor(max_workers=pool_size) as executor:
loop = asyncio.get_event_loop()
loop.run_until_complete(db.connect())
loop.run_until_complete(block.initialize_blocks())
def on_complete_execution(f: asyncio.Future[Execution | None]):
exception = f.exception()
if exception:
logger.exception("Error during execution!! %s", exception)
return exception
execution = f.result()
if execution:
loop.run_until_complete(add_execution(execution, queue))
return None
logger.warning("Executor started!")
with ProcessPoolExecutor(
max_workers=pool_size,
initializer=db.connect_sync,
) as executor:
while True:
execution: Execution | None = queue.get()
if not execution:
time.sleep(1)
continue
executor.submit(
execute_node,
execution.execution_id,
execution.data,
) # type: ignore
future = executor.submit(execute_node_sync, queue.get())
future.add_done_callback(on_complete_execution) # type: ignore
def start_executor_manager(pool_size: int, queue: ExecutionQueue) -> None:


@@ -1,11 +1,14 @@
import asyncio
import uuid
import uvicorn
from fastapi import APIRouter, FastAPI
from autogpt_server.data import ExecutionQueue
from fastapi import APIRouter, FastAPI, HTTPException
from autogpt_server.data import db, execution, graph
class AgentServer:
def __init__(self, queue: ExecutionQueue):
def __init__(self, queue: execution.ExecutionQueue):
self.app = FastAPI(
title="AutoGPT Agent Server",
description=(
@@ -25,14 +28,38 @@ class AgentServer:
methods=["POST"],
)
self.app.include_router(self.router)
self.app.on_event("startup")(db.connect)
self.app.on_event("shutdown")(db.disconnect)
def execute_agent(self, agent_id: str):
execution_id = self.execution_queue.add(agent_id)
return {"execution_id": execution_id, "agent_id": agent_id}
async def execute_agent(self, agent_id: str, node_input: dict):
agent = await graph.get_graph(agent_id)
if not agent:
raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.")
run_id = str(uuid.uuid4())
tasks = []
# Currently, there is no constraint on the number of root nodes in the graph.
for node in agent.starting_nodes:
block = await node.block
if error := block.input_schema.validate_data(node_input):
raise HTTPException(
status_code=400,
detail=f"Input data doesn't match {block.name} input: {error}",
)
task = execution.add_execution(
execution.Execution(
run_id=run_id, node_id=node.id, data=node_input
),
self.execution_queue,
)
tasks.append(task)
return await asyncio.gather(*tasks)
def start_server(queue: ExecutionQueue, use_uvicorn: bool = True):
app = AgentServer(queue).app
if use_uvicorn:
uvicorn.run(app)
return app
def start_server(queue: execution.ExecutionQueue):
agent_server = AgentServer(queue)
uvicorn.run(agent_server.app)


@@ -33,6 +33,25 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
trio = ["trio (>=0.23)"]
[[package]]
name = "attrs"
version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]]
name = "certifi"
version = "2024.6.2"
@@ -211,6 +230,22 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "flake8"
version = "7.0.0"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.8.1"
files = [
{file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"},
{file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.11.0,<2.12.0"
pyflakes = ">=3.2.0,<3.3.0"
[[package]]
name = "h11"
version = "0.14.0"
@@ -354,6 +389,41 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "jsonschema"
version = "4.22.0"
description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
{file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
]
[package.dependencies]
attrs = ">=22.2.0"
jsonschema-specifications = ">=2023.03.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
[package.extras]
format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"]
[[package]]
name = "jsonschema-specifications"
version = "2023.12.1"
description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
optional = false
python-versions = ">=3.8"
files = [
{file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"},
{file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"},
]
[package.dependencies]
referencing = ">=0.31.0"
[[package]]
name = "lief"
version = "0.14.1"
@@ -462,6 +532,17 @@ files = [
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
name = "mccabe"
version = "0.7.0"
description = "McCabe checker, plugin for flake8"
optional = false
python-versions = ">=3.6"
files = [
{file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -573,6 +654,17 @@ typing-extensions = ">=4.5.0"
all = ["nodejs-bin"]
node = ["nodejs-bin"]
[[package]]
name = "pycodestyle"
version = "2.11.1"
description = "Python style guide checker"
optional = false
python-versions = ">=3.8"
files = [
{file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"},
{file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"},
]
[[package]]
name = "pydantic"
version = "2.7.3"
@@ -683,6 +775,17 @@ files = [
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pyflakes"
version = "3.2.0"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.8"
files = [
{file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"},
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
]
[[package]]
name = "pytest"
version = "8.2.2"
@@ -705,6 +808,24 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.23.7"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"},
{file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"},
]
[package.dependencies]
pytest = ">=7.0.0,<9"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "python-dotenv"
version = "1.0.1"
@@ -779,6 +900,155 @@ files = [
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "referencing"
version = "0.35.1"
description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
{file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
]
[package.dependencies]
attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
[[package]]
name = "rpds-py"
version = "0.18.1"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.8"
files = [
{file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"},
{file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"},
{file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"},
{file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"},
{file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"},
{file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
{file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"},
{file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"},
{file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"},
{file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"},
{file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"},
{file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"},
{file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"},
{file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"},
{file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"},
{file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"},
{file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"},
{file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"},
{file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"},
{file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"},
{file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"},
{file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"},
{file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"},
{file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"},
{file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"},
{file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"},
{file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"},
{file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"},
{file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"},
{file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"},
]
[[package]]
name = "ruff"
version = "0.4.8"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.4.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7663a6d78f6adb0eab270fa9cf1ff2d28618ca3a652b60f2a234d92b9ec89066"},
{file = "ruff-0.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eeceb78da8afb6de0ddada93112869852d04f1cd0f6b80fe464fd4e35c330913"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aad360893e92486662ef3be0a339c5ca3c1b109e0134fcd37d534d4be9fb8de3"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:284c2e3f3396fb05f5f803c9fffb53ebbe09a3ebe7dda2929ed8d73ded736deb"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7354f921e3fbe04d2a62d46707e569f9315e1a613307f7311a935743c51a764"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:72584676164e15a68a15778fd1b17c28a519e7a0622161eb2debdcdabdc71883"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9678d5c9b43315f323af2233a04d747409d1e3aa6789620083a82d1066a35199"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704977a658131651a22b5ebeb28b717ef42ac6ee3b11e91dc87b633b5d83142b"},
{file = "ruff-0.4.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d05f8d6f0c3cce5026cecd83b7a143dcad503045857bc49662f736437380ad45"},
{file = "ruff-0.4.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6ea874950daca5697309d976c9afba830d3bf0ed66887481d6bca1673fc5b66a"},
{file = "ruff-0.4.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fc95aac2943ddf360376be9aa3107c8cf9640083940a8c5bd824be692d2216dc"},
{file = "ruff-0.4.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:384154a1c3f4bf537bac69f33720957ee49ac8d484bfc91720cc94172026ceed"},
{file = "ruff-0.4.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e9d5ce97cacc99878aa0d084c626a15cd21e6b3d53fd6f9112b7fc485918e1fa"},
{file = "ruff-0.4.8-py3-none-win32.whl", hash = "sha256:6d795d7639212c2dfd01991259460101c22aabf420d9b943f153ab9d9706e6a9"},
{file = "ruff-0.4.8-py3-none-win_amd64.whl", hash = "sha256:e14a3a095d07560a9d6769a72f781d73259655919d9b396c650fc98a8157555d"},
{file = "ruff-0.4.8-py3-none-win_arm64.whl", hash = "sha256:14019a06dbe29b608f6b7cbcec300e3170a8d86efaddb7b23405cb7f7dcaf780"},
{file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"},
]
[[package]]
name = "setuptools"
version = "69.5.1"
@@ -1127,4 +1397,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "d999a99c717019087f238f4f1aeb89b1671a77e53f175108bf91fb85ac0be657"
content-hash = "bbe3a66ddebdb30ff4ca357e5d403659d2a7549b38adad691817a485d503062f"


@@ -16,6 +16,10 @@ prisma = "^0.13.1"
pytest = "^8.2.1"
uvicorn = { extras = ["standard"], version = "^0.30.1" }
fastapi = "^0.109.0"
pytest-asyncio = "^0.23.7"
ruff = "^0.4.8"
flake8 = "^7.0.0"
jsonschema = "^4.22.0"
[tool.poetry.group.dev.dependencies]


@@ -15,9 +15,6 @@ model AgentGraph {
name String?
description String?
startingAgentNodeId String
StartingAgentNode AgentNode @relation("AgentGraphRoot", fields: [startingAgentNodeId], references: [id])
AgentNodes AgentNode[] @relation("AgentGraphNodes")
}
@@ -32,30 +29,30 @@ model AgentNode {
AgentGraph AgentGraph @relation("AgentGraphNodes", fields: [agentGraphId], references: [id])
// List of consumed inputs that the parent node should provide.
Input AgentNodeLink[] @relation("AgentNodeInput")
Input AgentNodeLink[] @relation("AgentNodeSink")
// List of produced outputs that trigger execution of the child node.
Output AgentNodeLink[] @relation("AgentNodeOutput")
Output AgentNodeLink[] @relation("AgentNodeSource")
// JSON serialized dict[str, str] containing predefined input values.
constantInput String @default("{}")
ExecutionHistory AgentNodeExecution[]
// Prisma requires explicit back-references.
ReferencedByAgentGraphAsRoot AgentGraph[] @relation("AgentGraphRoot")
}
// This model describes the link between two AgentNodes.
model AgentNodeLink {
id String @id
agentNodeInputId String
AgentNodeInput AgentNode @relation("AgentNodeOutput", fields: [agentNodeInputId], references: [id]) // Output of the node is the input of the link.
agentNodeInputSchemaId String
AgentNodeInputSchema AgentBlockInputOutput @relation("AgentNodeInputSchema", fields: [agentNodeInputSchemaId], references: [id])
// Output of a node is connected to the source of the link.
agentNodeSourceId String
AgentNodeSource AgentNode @relation("AgentNodeSource", fields: [agentNodeSourceId], references: [id])
sourceName String
agentNodeOutputId String
AgentNodeOutput AgentNode @relation("AgentNodeInput", fields: [agentNodeOutputId], references: [id]) // Input of the node is the output of the link.
agentNodeOutputSchemaId String
AgentNodeOutputSchema AgentBlockInputOutput @relation("AgentNodeOutputSchema", fields: [agentNodeOutputSchemaId], references: [id])
// Input of a node is connected to the sink of the link.
agentNodeSinkId String
AgentNodeSink AgentNode @relation("AgentNodeSink", fields: [agentNodeSinkId], references: [id])
sinkName String
}
// This model describes a component that will be executed by the AgentNode.
@@ -63,49 +60,35 @@ model AgentBlock {
id String @id
name String @unique
// We allow a block to have multiple types of output.
Input AgentBlockInputOutput[] @relation("AgentBlockInput")
Output AgentBlockInputOutput[] @relation("AgentBlockOutput")
// We allow a block to have multiple types of input & output.
// Serialized object-typed `jsonschema` with top-level properties as input/output name.
inputSchema String
outputSchema String
// Prisma requires explicit back-references.
ReferencedByAgentNode AgentNode[]
}
// This model describes the output (produced event) or input (consumed event) of an AgentBlock.
model AgentBlockInputOutput {
id String @id
name String
schema String
description String
// Prisma requires explicit back-references.
ReferencedByAgentBlockInput AgentBlock[] @relation("AgentBlockInput")
ReferencedByAgentBlockOutput AgentBlock[] @relation("AgentBlockOutput")
ReferencedByAgentNodeLinkAsInput AgentNodeLink[] @relation("AgentNodeInputSchema")
ReferencedByAgentNodeLinkAsOutput AgentNodeLink[] @relation("AgentNodeOutputSchema")
ReferencedByAgentNodeExecution AgentNodeExecution[]
}
// This model describes the execution of an AgentNode.
model AgentNodeExecution {
id String @id
id String @id
executionId String
agentNodeId String
AgentNode AgentNode @relation(fields: [agentNodeId], references: [id])
inputData String
inputFiles FileDefinition[] @relation("InputFiles")
outputData String
outputFiles FileDefinition[] @relation("OutputFiles")
outputTypeId String?
outputType AgentBlockInputOutput? @relation(fields: [outputTypeId], references: [id])
inputData String?
inputFiles FileDefinition[] @relation("InputFiles")
outputName String?
outputData String?
outputFiles FileDefinition[] @relation("OutputFiles")
// sqlite does not support enum
// enum Status { STARTED, RUNNING, SUCCESS, FAILED }
// enum Status { QUEUED, RUNNING, SUCCESS, FAILED }
executionStatus String
// JSON serialized object of the execution state: information required to resume the execution.
executionStateData String
creationTime DateTime
startTime DateTime?
endTime DateTime?
}
// This model describes a file that can be used as input/output of an AgentNodeExecution.


@@ -1,30 +1,97 @@
import pytest
from fastapi.testclient import TestClient
from autogpt_server.data import ExecutionQueue
from autogpt_server.executor import start_executor_manager
from autogpt_server.server import start_server
from autogpt_server.data import block, db, graph
from autogpt_server.data.execution import ExecutionQueue, add_execution
from autogpt_server.executor import executor
from autogpt_server.server import server
@pytest.fixture
def client():
execution_queue = ExecutionQueue()
start_executor_manager(5, execution_queue)
return TestClient(start_server(execution_queue, use_uvicorn=False))
async def create_test_graph() -> graph.Graph:
"""
ParrotBlock
\
---- TextCombinerBlock ---- PrintingBlock
/
ParrotBlock
"""
nodes = [
graph.Node(block_id=block.ParrotBlock.id),
graph.Node(block_id=block.ParrotBlock.id),
graph.Node(
block_id=block.TextCombinerBlock.id,
input_default={"format": "{text1},{text2}"}
),
graph.Node(block_id=block.PrintingBlock.id),
]
nodes[0].connect(nodes[2], "output", "text1")
nodes[1].connect(nodes[2], "output", "text2")
nodes[2].connect(nodes[3], "combined_text", "text")
test_graph = graph.Graph(
name="TestGraph",
description="Test graph",
nodes=nodes,
)
await block.initialize_blocks()
result = await graph.create_graph(test_graph)
# Assertions
assert result.name == test_graph.name
assert result.description == test_graph.description
assert len(result.nodes) == len(test_graph.nodes)
return result
def test_execute_agent(client: TestClient):
# Assert API is working
response = client.post("/agents/dummy_agent_1/execute")
assert response.status_code == 200
async def execute_node(queue: ExecutionQueue) -> dict | None:
next_exec = await executor.execute_node(queue.get())
if not next_exec:
return None
await add_execution(next_exec, queue)
return next_exec.data
# Assert response is correct
data = response.json()
exec_id = data["execution_id"]
agent_id = data["agent_id"]
assert agent_id == "dummy_agent_1"
assert isinstance(exec_id, str)
assert len(exec_id) == 36
# TODO: Add assertion that the executor is executed after some time
# Add this when db integration is done.
@pytest.mark.asyncio
async def test_agent_execution():
await db.connect()
test_graph = await create_test_graph()
test_queue = ExecutionQueue()
test_server = server.AgentServer(test_queue)
# --- Test adding new executions --- #
text = "Hello, World!"
input_data = {"input": text}
executions = await test_server.execute_agent(test_graph.id, input_data)
# 2 executions should be created, one for each ParrotBlock, with the same run_id.
assert len(executions) == 2
assert executions[0].run_id == executions[1].run_id
assert executions[0].node_id != executions[1].node_id
assert executions[0].data == executions[1].data == input_data
# --- Test Executing added tasks --- #
# Executing ParrotBlock1, TextCombinerBlock won't be enqueued yet.
assert not test_queue.empty()
next_execution = await execute_node(test_queue)
assert next_execution is None
# Executing ParrotBlock2, TextCombinerBlock will be enqueued.
assert not test_queue.empty()
next_execution = await execute_node(test_queue)
assert test_queue.empty()
assert next_execution
assert next_execution.keys() == {"text1", "text2", "format"}
assert next_execution["text1"] == text
assert next_execution["text2"] == text
assert next_execution["format"] == "{text1},{text2}"
# Executing TextCombinerBlock, PrintingBlock will be enqueued.
next_execution = await execute_node(test_queue)
assert next_execution
assert next_execution.keys() == {"text"}
assert next_execution["text"] == f"{text},{text}"
# Executing PrintingBlock, no more tasks will be enqueued.
next_execution = await execute_node(test_queue)
assert next_execution is None