Updating to version v0.4 of the Protocol (#18)

Swifty
2023-08-24 17:50:06 +02:00
committed by GitHub
parent 19ee68bcd9
commit 5db1a93257
25 changed files with 1519 additions and 938 deletions

10 .gitignore vendored

@@ -161,7 +161,11 @@ openai/
# news
CURRENT_BULLETIN.md
# agbenchmark
agbenchmark
agent.db
*.sqlite
.agbench
.agbenchmark
.benchmarks
.mypy_cache
.pytest_cache
.vscode

40 Dockerfile Normal file

@@ -0,0 +1,40 @@
# Use an official Python runtime as a parent image
FROM python:3.11-slim-buster as base
# Set work directory in the container
WORKDIR /app
# Install system dependencies
RUN apt-get update \
&& apt-get install -y build-essential curl ffmpeg \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install Poetry - respects $POETRY_VERSION & $POETRY_HOME
ENV POETRY_VERSION=1.1.8 \
POETRY_HOME="/opt/poetry" \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false \
PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install "poetry==$POETRY_VERSION"
COPY pyproject.toml poetry.lock* /app/
# Project initialization:
RUN poetry install --no-interaction --no-ansi
ENV PYTHONPATH="/app:$PYTHONPATH"
FROM base as dependencies
# Copy project
COPY . /app
# Make port 8000 available to the world outside this container
EXPOSE 8000
# Run the application when the container launches
CMD ["poetry", "run", "python", "autogpt/__main__.py"]
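A hypothetical build-and-run sketch for this image (the tag and env file are illustrative, not part of the commit):
# docker build -t autogpt-forge .
# docker run --env-file .env -p 8000:8000 autogpt-forge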

@@ -1,7 +1,5 @@
MIT License
Copyright (c) 2023 Toran Bruce Richards
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
@@ -18,4 +16,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -2,22 +2,32 @@ import os
from dotenv import load_dotenv
import autogpt.agent
import autogpt.db
from autogpt.benchmark_integration import add_benchmark_routes
from autogpt.workspace import LocalWorkspace
load_dotenv()
import autogpt.forge_log
ENABLE_TRACING = os.environ.get("ENABLE_TRACING", "false").lower() == "true"
autogpt.forge_log.setup_logger()
LOG = autogpt.forge_log.CustomLogger(__name__)
if __name__ == "__main__":
"""Runs the agent server"""
load_dotenv()
# modules are imported here so that logging is setup first
import autogpt.agent
import autogpt.db
from autogpt.benchmark_integration import add_benchmark_routes
from autogpt.workspace import LocalWorkspace
router = add_benchmark_routes()
database_name = os.getenv("DATABASE_STRING")
workspace = LocalWorkspace(os.getenv("AGENT_WORKSPACE"))
print(database_name)
port = os.getenv("PORT")
database = autogpt.db.AgentDB(database_name)
database = autogpt.db.AgentDB(database_name, debug_enabled=True)
agent = autogpt.agent.Agent(database=database, workspace=workspace)
agent.start(port=port, router=router)
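For context, this entry point reads its configuration from the environment; a hypothetical .env sketch (values are illustrative, only the variable names appear in the code above):
DATABASE_STRING=sqlite:///agent.db
AGENT_WORKSPACE=./workspace
PORT=8000
ENABLE_TRACING=false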

@@ -1,18 +1,22 @@
import asyncio
import os
import typing
from fastapi import APIRouter, FastAPI, Response, UploadFile
from fastapi import APIRouter, FastAPI, HTTPException, Response, UploadFile
from fastapi.responses import FileResponse, JSONResponse
from hypercorn.asyncio import serve
from hypercorn.config import Config
from prometheus_fastapi_instrumentator import Instrumentator
from .db import AgentDB
from .forge_log import CustomLogger
from .middlewares import AgentMiddleware
from .routes.agent_protocol import base_router
from .schema import Artifact, Status, Step, StepRequestBody, Task, TaskRequestBody
from .schema import *
from .tracing import setup_tracing
from .utils import run
from .workspace import Workspace
from .workspace import Workspace, load_from_uri
LOG = CustomLogger(__name__)
class Agent:
@@ -31,8 +35,22 @@ class Agent:
description="Modified version of The Agent Protocol.",
version="v0.4",
)
# Add Prometheus metrics to the agent
# https://github.com/trallnag/prometheus-fastapi-instrumentator
instrumentator = Instrumentator().instrument(app)
@app.on_event("startup")
async def _startup():
instrumentator.expose(app)
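# expose() mounts the metrics endpoint (GET /metrics by default in
# prometheus-fastapi-instrumentator)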
app.include_router(router)
app.add_middleware(AgentMiddleware, agent=self)
setup_tracing(app)
config.loglevel = "ERROR"
config.bind = [f"0.0.0.0:{port}"]
LOG.info(f"Agent server starting on {config.bind}")
asyncio.run(serve(app, config))
async def create_task(self, task_request: TaskRequestBody) -> Task:
@@ -46,21 +64,21 @@ class Agent:
if task_request.additional_input
else None,
)
print(task)
LOG.info(task.json())
except Exception as e:
return Response(status_code=500, content=str(e))
print(task)
return task
async def list_tasks(self) -> typing.List[str]:
async def list_tasks(self, page: int = 1, pageSize: int = 10) -> TaskListResponse:
"""
List the IDs of all tasks that the agent has created.
List all tasks that the agent has created.
"""
try:
task_ids = [task.task_id for task in await self.db.list_tasks()]
tasks, pagination = await self.db.list_tasks(page, pageSize)
response = TaskListResponse(tasks=tasks, pagination=pagination)
return Response(content=response.json(), media_type="application/json")
except Exception as e:
return Response(status_code=500, content=str(e))
return task_ids
raise HTTPException(status_code=500, detail=str(e))
async def get_task(self, task_id: str) -> Task:
"""
@@ -76,7 +94,9 @@ class Agent:
return Response(status_code=500, content=str(e))
return task
async def list_steps(self, task_id: str) -> typing.List[str]:
async def list_steps(
self, task_id: str, page: int = 1, pageSize: int = 10
) -> TaskStepsListResponse:
"""
List the IDs of all steps that the task has created.
"""
@@ -85,10 +105,11 @@ class Agent:
if not isinstance(task_id, str):
return Response(status_code=400, content="Task ID must be a string.")
try:
steps_ids = [step.step_id for step in await self.db.list_steps(task_id)]
steps, pagination = await self.db.list_steps(task_id, page, pageSize)
response = TaskStepsListResponse(steps=steps, pagination=pagination)
return Response(content=response.json(), media_type="application/json")
except Exception as e:
return Response(status_code=500, content=str(e))
return steps_ids
raise HTTPException(status_code=500, detail=str(e))
async def create_and_execute_step(
self, task_id: str, step_request: StepRequestBody
@@ -120,12 +141,18 @@ class Agent:
step.artifacts.append(art)
step.status = "completed"
else:
steps = await self.db.list_steps(task_id)
artifacts = await self.db.list_artifacts(task_id)
steps, steps_pagination = await self.db.list_steps(
task_id, page=1, per_page=100
)
artifacts, artifacts_pagination = await self.db.list_artifacts(
task_id, page=1, per_page=100
)
step = steps[-1]
step.artifacts = artifacts
step.output = "No more steps to run."
step.is_last = True
# The step is the last step on this page, so checking whether this is the
# last page is sufficient to know if it is the last step
step.is_last = steps_pagination.current_page == steps_pagination.total_pages
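# Worked example: with per_page=100 and, say, 23 steps there is a single
# page, so current_page == total_pages and the step above is marked last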
if isinstance(step.status, Status):
step.status = step.status.value
step.output = "Done some work"
@@ -149,7 +176,9 @@ class Agent:
return Response(status_code=500, content=str(e))
return step
async def list_artifacts(self, task_id: str) -> typing.List[Artifact]:
async def list_artifacts(
self, task_id: str, page: int = 1, pageSize: int = 10
) -> TaskArtifactsListResponse:
"""
List the artifacts that the task has created.
"""
@@ -158,10 +187,13 @@ class Agent:
if not isinstance(task_id, str):
return Response(status_code=400, content="Task ID must be a string.")
try:
artifacts = await self.db.list_artifacts(task_id)
artifacts, pagination = await self.db.list_artifacts(
task_id, page, pageSize
)
except Exception as e:
return Response(status_code=500, content=str(e))
return artifacts
raise HTTPException(status_code=500, detail=str(e))
response = TaskArtifactsListResponse(artifacts=artifacts, pagination=pagination)
return Response(content=response.json(), media_type="application/json")
async def create_artifact(
self,
@@ -183,10 +215,16 @@ class Agent:
data += contents
except Exception as e:
return Response(status_code=500, content=str(e))
else:
try:
data = await load_from_uri(uri, task_id)
file_name = uri.split("/")[-1]
except Exception as e:
return Response(status_code=500, content=str(e))
file_path = os.path.join(task_id, file_name)
self.write(file_path, data)
self.db.save_artifact(task_id, artifact)
artifact = await self.create_artifact(
task_id=task_id,
@@ -201,18 +239,20 @@ class Agent:
"""
Get an artifact by ID.
"""
artifact = await self.db.get_artifact(task_id, artifact_id)
if not artifact.uri.startswith("file://"):
return Response(
status_code=500, content="Loading from none file uri not implemented"
)
file_path = artifact.uri.split("file://")[1]
if not self.workspace.exists(file_path):
return Response(status_code=500, content="File not found")
retrieved_artifact = self.workspace.read(file_path)
try:
artifact = await self.db.get_artifact(task_id, artifact_id)
except Exception as e:
return Response(status_code=500, content=str(e))
try:
retrieved_artifact = await load_from_uri(artifact.uri, artifact_id)
except Exception as e:
return Response(status_code=500, content=str(e))
path = artifact.file_name
with open(path, "wb") as f:
f.write(retrieved_artifact)
try:
with open(path, "wb") as f:
f.write(retrieved_artifact)
except Exception as e:
return Response(status_code=500, content=str(e))
return FileResponse(
# Note: mimetype is guessed in the FileResponse constructor
path=path,

@@ -1,21 +0,0 @@
# Autogpt Protocol Directory
# DO NOT MODIFY ANY FILES IN THIS DIRECTORY
This directory contains protocol definitions crucial for our project. The current setup is a temporary measure to allow for speedy updating of the protocol.
## Background
In an ideal scenario, we'd directly use a submodule pointing to the original repository. However, given our specific needs and to expedite our development process, we've chosen a slightly different approach.
## Process
1. **Fork and Clone**: We started by forking the original repository `e2b-dev/agent-protocol` (not `Swiftyos/agent-protocol` as previously mentioned) to have our own version. This allows us to have more control over updates and possibly any specific changes that our project might need in the future.
2. **Manual Content Integration**: Instead of adding the entire forked repository as a submodule, we've manually copied over the contents of `sdk/python/agent_protocol` into this directory. This ensures we only have the parts we need, without any additional overhead.
3. **Updates**: Any necessary updates to the protocol can be made directly in our fork, and subsequently, the required changes can be reflected in this directory.
## Credits
All credit for the original protocol definitions goes to [e2b-dev/agent-protocol](https://github.com/e2b-dev/agent-protocol). We deeply appreciate their efforts in building the protocol, and this temporary measure is in no way intended to diminish the significance of their work. It's purely a practical approach for our specific requirements at this point in our development phase.

@@ -1,16 +0,0 @@
from .agent import Agent
from .agent import base_router as router
from .db import Step, Task, TaskDB
from .models import Artifact, Status, StepRequestBody, TaskRequestBody
__all__ = [
"Agent",
"Artifact",
"Status",
"Step",
"StepRequestBody",
"Task",
"TaskDB",
"TaskRequestBody",
"router",
]

@@ -1,233 +0,0 @@
import asyncio
from typing import List
from uuid import uuid4
from fastapi import APIRouter, Request, Response, UploadFile
from fastapi.responses import FileResponse, JSONResponse, Response
from hypercorn.asyncio import serve
from hypercorn.config import Config
from .db import Step, Task, TaskDB
from .middlewares import AgentMiddleware
from .models import Artifact, Status, StepRequestBody, TaskRequestBody
from .server import app
base_router = APIRouter()
@base_router.get("/heartbeat")
async def heartbeat() -> Response:
"""
Heartbeat endpoint to check if the server is running.
"""
return Response(status_code=200)
@base_router.post("/agent/tasks", response_model=Task, tags=["agent"])
async def create_agent_task(
request: Request, body: TaskRequestBody | None = None
) -> Task:
"""
Creates a task for the agent.
"""
agent: Agent = request["agent"]
task = await agent.db.create_task(
input=body.input if body else None,
additional_input=body.additional_input if body else None,
)
print(task)
await agent.create_task(task)
return task
@base_router.get("/agent/tasks", response_model=List[str], tags=["agent"])
async def list_agent_tasks_ids(request: Request) -> List[str]:
"""
List all tasks that have been created for the agent.
"""
agent: Agent = request["agent"]
return [task.task_id for task in await agent.db.list_tasks()]
@base_router.get("/agent/tasks/{task_id}", response_model=Task, tags=["agent"])
async def get_agent_task(request: Request, task_id: str) -> Task:
"""
Get details about a specified agent task.
"""
agent: Agent = request["agent"]
return await agent.db.get_task(task_id)
@base_router.get(
"/agent/tasks/{task_id}/steps",
response_model=List[str],
tags=["agent"],
)
async def list_agent_task_steps(request: Request, task_id: str) -> List[str]:
"""
List all steps for the specified task.
"""
agent: Agent = request["agent"]
task = await agent.db.get_task(task_id)
return [s.step_id for s in task.steps]
@base_router.post(
"/agent/tasks/{task_id}/steps",
response_model=Step,
tags=["agent"],
)
async def execute_agent_task_step(
request: Request,
task_id: str,
body: StepRequestBody | None = None,
) -> Step:
"""
Execute a step in the specified agent task.
"""
agent: Agent = request["agent"]
if not body or body.input != "y":
step = await agent.db.create_step(
task_id=task_id,
input=body.input if body else None,
additional_properties=body.additional_input if body else None,
)
step = await agent.run_step(step)
step.output = "Task completed"
step.is_last = True
else:
steps = await agent.db.list_steps(task_id)
artifacts = await agent.db.list_artifacts(task_id)
step = steps[-1]
step.artifacts = artifacts
step.output = "No more steps to run."
step.is_last = True
if isinstance(step.status, Status):
step.status = step.status.value
return JSONResponse(content=step.dict(), status_code=200)
@base_router.get(
"/agent/tasks/{task_id}/steps/{step_id}",
response_model=Step,
tags=["agent"],
)
async def get_agent_task_step(request: Request, task_id: str, step_id: str) -> Step:
"""
Get details about a specified task step.
"""
agent: Agent = request["agent"]
return await agent.db.get_step(task_id, step_id)
@base_router.get(
"/agent/tasks/{task_id}/artifacts",
response_model=List[Artifact],
tags=["agent"],
)
async def list_agent_task_artifacts(request: Request, task_id: str) -> List[Artifact]:
"""
List all artifacts for the specified task.
"""
agent: Agent = request["agent"]
task = await agent.db.get_task(task_id)
return task.artifacts
@base_router.post(
"/agent/tasks/{task_id}/artifacts",
response_model=Artifact,
tags=["agent"],
)
async def upload_agent_task_artifacts(
request: Request,
task_id: str,
file: UploadFile | None = None,
uri: str | None = None,
) -> Artifact:
"""
Upload an artifact for the specified task.
"""
agent: Agent = request["agent"]
if not file and not uri:
return Response(status_code=400, content="No file or uri provided")
data = None
if not uri:
file_name = file.filename or str(uuid4())
try:
data = b""
while contents := file.file.read(1024 * 1024):
data += contents
except Exception as e:
return Response(status_code=500, content=str(e))
artifact = await agent.save_artifact(task_id, artifact, data)
agent.db.add_artifact(task_id, artifact)
return artifact
@base_router.get(
"/agent/tasks/{task_id}/artifacts/{artifact_id}",
tags=["agent"],
)
async def download_agent_task_artifacts(
request: Request, task_id: str, artifact_id: str
) -> FileResponse:
"""
Download the specified artifact.
"""
agent: Agent = request["agent"]
artifact = await agent.db.get_artifact(task_id, artifact_id)
retrieved_artifact: Artifact = await agent.retrieve_artifact(task_id, artifact)
path = artifact.file_name
with open(path, "wb") as f:
f.write(retrieved_artifact)
return FileResponse(
# Note: mimetype is guessed in the FileResponse constructor
path=path,
filename=artifact.file_name,
)
class Agent:
def __init__(self, db: TaskDB):
self.name = self.__class__.__name__
self.db = db
def start(self, port: int = 8000, router: APIRouter = base_router):
"""
Start the agent server.
"""
config = Config()
config.bind = [f"localhost:{port}"] # As an example configuration setting
app.title = f"{self.name} - Agent Communication Protocol"
app.include_router(router)
app.add_middleware(AgentMiddleware, agent=self)
asyncio.run(serve(app, config))
async def create_task(self, task: Task):
"""
Handles a new task
"""
return task
async def run_step(self, step: Step) -> Step:
return step
async def retrieve_artifact(self, task_id: str, artifact: Artifact) -> bytes:
"""
Retrieve the artifact data from wherever it is stored and return it as bytes.
"""
raise NotImplementedError("")
async def save_artifact(
self, task_id: str, artifact: Artifact, data: bytes | None = None
) -> Artifact:
"""
Save the artifact data to the agent's workspace, loading from uri if bytes are not available.
"""
raise NotImplementedError()

@@ -1,173 +0,0 @@
import uuid
from abc import ABC
from typing import Any, Dict, List, Optional
from .models import Artifact, Status
from .models import Step as APIStep
from .models import Task as APITask
class Step(APIStep):
additional_properties: Optional[Dict[str, str]] = None
class Task(APITask):
steps: Optional[List[Step]] = []
class NotFoundException(Exception):
"""
Exception raised when a resource is not found.
"""
def __init__(self, item_name: str, item_id: str):
self.item_name = item_name
self.item_id = item_id
super().__init__(f"{item_name} with {item_id} not found.")
class TaskDB(ABC):
async def create_task(
self, input: Optional[str], additional_input: Any = None
) -> Task:
raise NotImplementedError
async def create_step(
self,
task_id: str,
name: Optional[str] = None,
input: Optional[str] = None,
is_last: bool = False,
additional_properties: Optional[Dict[str, str]] = None,
) -> Step:
raise NotImplementedError
async def create_artifact(
self,
task_id: str,
artifact: Artifact,
step_id: str | None = None,
) -> Artifact:
raise NotImplementedError
async def get_task(self, task_id: str) -> Task:
raise NotImplementedError
async def get_step(self, task_id: str, step_id: str) -> Step:
raise NotImplementedError
async def get_artifact(self, task_id: str, artifact_id: str) -> Artifact:
raise NotImplementedError
async def list_tasks(self) -> List[Task]:
raise NotImplementedError
async def list_steps(
self, task_id: str, status: Optional[Status] = None
) -> List[Step]:
raise NotImplementedError
async def update_step(
self,
task_id: str,
step_id: str,
status: str,
additional_properties: Optional[Dict[str, str]] = None,
) -> Step:
raise NotImplementedError
class InMemoryTaskDB(TaskDB):
_tasks: Dict[str, Task] = {}
async def create_task(
self,
input: Optional[str],
additional_input: Any = None,
) -> Task:
task_id = str(uuid.uuid4())
task = Task(
task_id=task_id,
input=input,
steps=[],
artifacts=[],
additional_input=additional_input,
)
self._tasks[task_id] = task
return task
async def create_step(
self,
task_id: str,
name: Optional[str] = None,
input: Optional[str] = None,
is_last=False,
additional_properties: Optional[Dict[str, Any]] = None,
) -> Step:
step_id = str(uuid.uuid4())
step = Step(
task_id=task_id,
step_id=step_id,
name=name,
input=input,
status=Status.created,
is_last=is_last,
additional_properties=additional_properties,
)
task = await self.get_task(task_id)
task.steps.append(step)
return step
async def get_task(self, task_id: str) -> Task:
task = self._tasks.get(task_id, None)
if not task:
raise NotFoundException("Task", task_id)
return task
async def get_step(self, task_id: str, step_id: str) -> Step:
task = await self.get_task(task_id)
step = next(filter(lambda s: s.step_id == step_id, task.steps), None)
if not step:
raise NotFoundException("Step", step_id)
return step
async def get_artifact(self, task_id: str, artifact_id: str) -> Artifact:
task = await self.get_task(task_id)
artifact = next(
filter(lambda a: a.artifact_id == artifact_id, task.artifacts), None
)
if not artifact:
raise NotFoundException("Artifact", artifact_id)
return artifact
async def create_artifact(
self,
task_id: str,
file_name: str,
relative_path: Optional[str] = None,
step_id: Optional[str] = None,
) -> Artifact:
artifact_id = str(uuid.uuid4())
artifact = Artifact(
artifact_id=artifact_id, file_name=file_name, relative_path=relative_path
)
task = await self.get_task(task_id)
task.artifacts.append(artifact)
if step_id:
step = await self.get_step(task_id, step_id)
step.artifacts.append(artifact)
return artifact
async def list_tasks(self) -> List[Task]:
return [task for task in self._tasks.values()]
async def list_steps(
self, task_id: str, status: Optional[Status] = None
) -> List[Step]:
task = await self.get_task(task_id)
steps = task.steps
if status:
steps = list(filter(lambda s: s.status == status, steps))
return steps

@@ -1,46 +0,0 @@
from fastapi import FastAPI, Request
from fastapi.responses import PlainTextResponse
from .db import NotFoundException
async def not_found_exception_handler(
request: Request, exc: NotFoundException
) -> PlainTextResponse:
return PlainTextResponse(
str(exc),
status_code=404,
)
class AgentMiddleware:
"""
Middleware that injects the agent instance into the request scope.
"""
def __init__(self, app: FastAPI, agent: "Agent"):
"""
Args:
app: The FastAPI app - automatically injected by FastAPI.
agent: The agent instance to inject into the request scope.
Examples:
>>> from fastapi import FastAPI, Request
>>> from agent_protocol.agent import Agent
>>> from agent_protocol.middlewares import AgentMiddleware
>>> app = FastAPI()
>>> @app.get("/")
>>> async def root(request: Request):
>>> agent = request["agent"]
>>> task = agent.db.create_task("Do something.")
>>> return {"task_id": task.task_id}
>>> agent = Agent()
>>> app.add_middleware(AgentMiddleware, agent=agent)
"""
self.app = app
self.agent = agent
async def __call__(self, scope, receive, send):
scope["agent"] = self.agent
await self.app(scope, receive, send)

@@ -1,131 +0,0 @@
# generated by fastapi-codegen:
# filename: ../../openapi.yml
# timestamp: 2023-08-17T11:26:07+00:00
from __future__ import annotations
from enum import Enum
from typing import Any, List, Optional
from pydantic import BaseModel, Field
class TaskInput(BaseModel):
__root__: Any = Field(
...,
description="Input parameters for the task. Any value is allowed.",
example='{\n"debug": false,\n"mode": "benchmarks"\n}',
)
class Artifact(BaseModel):
artifact_id: str = Field(
...,
description="ID of the artifact.",
example="b225e278-8b4c-4f99-a696-8facf19f0e56",
)
file_name: str = Field(
..., description="Filename of the artifact.", example="main.py"
)
agent_created: Optional[bool] = Field(
None,
description="Whether the artifact has been created by the agent.",
example=False,
)
uri: Optional[str] = Field(
None,
description="URI of the artifact.",
example="file://home/bob/workspace/bucket/main.py",
)
class ArtifactUpload(BaseModel):
file: bytes = Field(..., description="File to upload.")
relative_path: Optional[str] = Field(
None,
description="Relative path of the artifact in the agent's workspace.",
example="python/code",
)
class StepInput(BaseModel):
__root__: Any = Field(
...,
description="Input parameters for the task step. Any value is allowed.",
example='{\n"file_to_refactor": "models.py"\n}',
)
class StepOutput(BaseModel):
__root__: Any = Field(
...,
description="Output that the task step has produced. Any value is allowed.",
example='{\n"tokens": 7894,\n"estimated_cost": "0,24$"\n}',
)
class TaskRequestBody(BaseModel):
input: Optional[str] = Field(
None,
description="Input prompt for the task.",
example="Write the words you receive to the file 'output.txt'.",
)
additional_input: Optional[TaskInput] = None
class Task(TaskRequestBody):
task_id: str = Field(
...,
description="The ID of the task.",
example="50da533e-3904-4401-8a07-c49adf88b5eb",
)
artifacts: Optional[List[Artifact]] = Field(
[],
description="A list of artifacts that the task has produced.",
example=[
"7a49f31c-f9c6-4346-a22c-e32bc5af4d8e",
"ab7b4091-2560-4692-a4fe-d831ea3ca7d6",
],
)
class StepRequestBody(BaseModel):
input: Optional[str] = Field(
None, description="Input prompt for the step.", example="Washington"
)
additional_input: Optional[StepInput] = None
class Status(Enum):
created = "created"
running = "running"
completed = "completed"
class Step(StepRequestBody):
task_id: str = Field(
...,
description="The ID of the task this step belongs to.",
example="50da533e-3904-4401-8a07-c49adf88b5eb",
)
step_id: str = Field(
...,
description="The ID of the task step.",
example="6bb1801a-fd80-45e8-899a-4dd723cc602e",
)
name: Optional[str] = Field(
None, description="The name of the task step.", example="Write to file"
)
status: Status = Field(..., description="The status of the task step.")
output: Optional[str] = Field(
None,
description="Output of the task step.",
example="I am going to use the write_to_file command and write Washington to a file called output.txt <write_to_file('output.txt', 'Washington')",
)
additional_output: Optional[StepOutput] = None
artifacts: Optional[List[Artifact]] = Field(
[], description="A list of artifacts that the step has produced."
)
is_last: Optional[bool] = Field(
False, description="Whether this is the last step in the task."
)

@@ -1,18 +0,0 @@
# generated by fastapi-codegen:
# filename: openapi.yml
# timestamp: 2023-08-07T12:14:43+00:00
from __future__ import annotations
from fastapi import FastAPI
from .db import NotFoundException
from .middlewares import not_found_exception_handler
app = FastAPI(
title="Agent Communication Protocol",
description="Specification of the API protocol for communication with an agent.",
version="v0.3",
)
app.add_exception_handler(NotFoundException, not_found_exception_handler)

@@ -4,12 +4,16 @@ It uses SQLite as the database and file store backend.
IT IS NOT ADVISED TO USE THIS IN PRODUCTION!
"""
from typing import Dict, List, Optional
import math
from typing import Dict, List, Optional, Tuple
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import DeclarativeBase, joinedload, relationship, sessionmaker
from .schema import Artifact, Status, Step, Task, TaskInput
from .forge_log import CustomLogger
from .schema import Artifact, Pagination, Status, Step, Task, TaskInput
LOG = CustomLogger(__name__)
class Base(DeclarativeBase):
@@ -58,17 +62,30 @@ class ArtifactModel(Base):
task = relationship("TaskModel", back_populates="artifacts")
def convert_to_task(task_obj: TaskModel) -> Task:
def convert_to_task(task_obj: TaskModel, debug_enabled: bool = False) -> Task:
if debug_enabled:
LOG.debug(f"Converting TaskModel to Task for task_id: {task_obj.task_id}")
task_artifacts = [
Artifact(
artifact_id=artifact.artifact_id,
file_name=artifact.file_name,
agent_created=artifact.agent_created,
uri=artifact.uri,
)
for artifact in task_obj.artifacts
if artifact.task_id == task_obj.task_id
]
return Task(
task_id=task_obj.task_id,
input=task_obj.input,
additional_input=task_obj.additional_input,
artifacts=[],
artifacts=task_artifacts,
)
def convert_to_step(step_model: StepModel) -> Step:
print(step_model)
def convert_to_step(step_model: StepModel, debug_enabled: bool = False) -> Step:
if debug_enabled:
LOG.debug(f"Converting StepModel to Step for step_id: {step_model.step_id}")
step_artifacts = [
Artifact(
artifact_id=artifact.artifact_id,
@@ -94,37 +111,48 @@ def convert_to_step(step_model: StepModel) -> Step:
# sqlite:///{database_name}
class AgentDB:
def __init__(self, database_string) -> None:
def __init__(self, database_string, debug_enabled: bool = False) -> None:
super().__init__()
self.debug_enabled = debug_enabled
if self.debug_enabled:
LOG.debug(f"Initializing AgentDB with database_string: {database_string}")
self.engine = create_engine(database_string)
Base.metadata.create_all(self.engine)
self.Session = sessionmaker(bind=self.engine)
print("Databases Created")
async def create_task(self, input: Optional[str], additional_input: Optional[TaskInput] = None) -> Task:
async def create_task(
self, input: Optional[str], additional_input: Optional[TaskInput] = None
) -> Task:
if self.debug_enabled:
LOG.debug("Creating new task")
with self.Session() as session:
new_task = TaskModel(
input=input,
additional_input=additional_input.__root__ if additional_input else None,
additional_input=additional_input.__root__
if additional_input
else None,
)
session.add(new_task)
session.commit()
session.refresh(new_task)
return convert_to_task(new_task)
if self.debug_enabled:
LOG.debug(f"Created new task with task_id: {new_task.task_id}")
return convert_to_task(new_task, self.debug_enabled)
async def create_step(
self,
task_id: str,
name: Optional[str] = None,
input: Optional[str] = None,
input: str,
is_last: bool = False,
additional_properties: Optional[Dict[str, str]] = None,
) -> Step:
if self.debug_enabled:
LOG.debug(f"Creating new step for task_id: {task_id}")
try:
session = self.Session()
new_step = StepModel(
task_id=task_id,
name=name,
name=input,
input=input,
status="created",
is_last=is_last,
@@ -133,9 +161,11 @@ class AgentDB:
session.add(new_step)
session.commit()
session.refresh(new_step)
if self.debug_enabled:
LOG.debug(f"Created new step with step_id: {new_step.step_id}")
except Exception as e:
print(e)
return convert_to_step(new_step)
LOG.error(f"Error while creating step: {e}")
return convert_to_step(new_step, self.debug_enabled)
async def create_artifact(
self,
@@ -145,10 +175,14 @@ class AgentDB:
agent_created: bool = False,
step_id: str | None = None,
) -> Artifact:
if self.debug_enabled:
LOG.debug(f"Creating new artifact for task_id: {task_id}")
session = self.Session()
if existing_artifact := session.query(ArtifactModel).filter_by(uri=uri).first():
session.close()
if self.debug_enabled:
LOG.debug(f"Artifact already exists with uri: {uri}")
return Artifact(
artifact_id=str(existing_artifact.artifact_id),
file_name=existing_artifact.file_name,
@@ -166,6 +200,10 @@ class AgentDB:
session.add(new_artifact)
session.commit()
session.refresh(new_artifact)
if self.debug_enabled:
LOG.debug(
f"Created new artifact with artifact_id: {new_artifact.artifact_id}"
)
return Artifact(
artifact_id=str(new_artifact.artifact_id),
file_name=new_artifact.file_name,
@@ -175,6 +213,8 @@ class AgentDB:
async def get_task(self, task_id: int) -> Task:
"""Get a task by its id"""
if self.debug_enabled:
LOG.debug(f"Getting task with task_id: {task_id}")
session = self.Session()
if task_obj := (
session.query(TaskModel)
@@ -182,11 +222,14 @@ class AgentDB:
.filter_by(task_id=task_id)
.first()
):
return convert_to_task(task_obj)
return convert_to_task(task_obj, self.debug_enabled)
else:
LOG.error(f"Task not found with task_id: {task_id}")
raise DataNotFoundError("Task not found")
async def get_step(self, task_id: int, step_id: int) -> Step:
if self.debug_enabled:
LOG.debug(f"Getting step with task_id: {task_id} and step_id: {step_id}")
session = self.Session()
if step := (
session.query(StepModel)
@@ -194,9 +237,10 @@ class AgentDB:
.filter(StepModel.step_id == step_id)
.first()
):
return convert_to_step(step)
return convert_to_step(step, self.debug_enabled)
else:
LOG.error(f"Step not found with task_id: {task_id} and step_id: {step_id}")
raise DataNotFoundError("Step not found")
async def update_step(
@@ -206,6 +250,8 @@ class AgentDB:
status: str,
additional_properties: Optional[Dict[str, str]] = None,
) -> Step:
if self.debug_enabled:
LOG.debug(f"Updating step with task_id: {task_id} and step_id: {step_id}")
session = self.Session()
if (
step := session.query(StepModel)
@@ -217,54 +263,91 @@ class AgentDB:
session.commit()
return await self.get_step(task_id, step_id)
else:
LOG.error(
f"Step not found for update with task_id: {task_id} and step_id: {step_id}"
)
raise DataNotFoundError("Step not found")
async def get_artifact(self, task_id: str, artifact_id: str) -> Artifact:
if self.debug_enabled:
LOG.debug(
f"Getting artifact with task_id: {task_id} and artifact_id: {artifact_id}"
)
session = self.Session()
if artifact_model := (
session.query(ArtifactModel)
.filter_by(task_id=task_id, artifact_id=artifact_id)
.filter_by(task_id=int(task_id), artifact_id=int(artifact_id))
.first()
):
return Artifact(
artifact_id=str(artifact_model.artifact_id), # Casting to string
artifact_id=artifact_model.artifact_id,
file_name=artifact_model.file_name,
agent_created=artifact_model.agent_created,
uri=artifact_model.uri,
)
else:
LOG.error(
f"Artifact not found with task_id: {task_id} and artifact_id: {artifact_id}"
)
raise DataNotFoundError("Artifact not found")
async def list_tasks(self) -> List[Task]:
async def list_tasks(
self, page: int = 1, per_page: int = 10
) -> Tuple[List[Task], Pagination]:
if self.debug_enabled:
LOG.debug("Listing tasks")
session = self.Session()
tasks = session.query(TaskModel).all()
return [
Task(
task_id=task.task_id,
input=task.input,
additional_input=task.additional_input,
artifacts=[],
steps=[],
)
for task in tasks
]
tasks = (
session.query(TaskModel).offset((page - 1) * per_page).limit(per_page).all()
)
total = session.query(TaskModel).count()
pages = math.ceil(total / per_page)
pagination = Pagination(
total_items=total, total_pages=pages, current_page=page, page_size=per_page
)
return [convert_to_task(task, self.debug_enabled) for task in tasks], pagination
async def list_steps(self, task_id: str) -> List[Step]:
async def list_steps(
self, task_id: str, page: int = 1, per_page: int = 10
) -> Tuple[List[Step], Pagination]:
if self.debug_enabled:
LOG.debug(f"Listing steps for task_id: {task_id}")
session = self.Session()
steps = session.query(StepModel).filter_by(task_id=task_id).all()
return [
Step(
task_id=task_id,
step_id=step.step_id,
name=step.name,
status=step.status,
)
for step in steps
]
steps = (
session.query(StepModel)
.filter_by(task_id=task_id)
.offset((page - 1) * per_page)
.limit(per_page)
.all()
)
total = session.query(StepModel).filter_by(task_id=task_id).count()
pages = math.ceil(total / per_page)
pagination = Pagination(
total_items=total, total_pages=pages, current_page=page, page_size=per_page
)
return [convert_to_step(step, self.debug_enabled) for step in steps], pagination
async def list_artifacts(self, task_id: str) -> List[Artifact]:
async def list_artifacts(
self, task_id: str, page: int = 1, per_page: int = 10
) -> Tuple[List[Artifact], Pagination]:
if self.debug_enabled:
LOG.debug(f"Listing artifacts for task_id: {task_id}")
with self.Session() as session:
artifacts = session.query(ArtifactModel).filter_by(task_id=task_id).all()
artifacts = (
session.query(ArtifactModel)
.filter_by(task_id=task_id)
.offset((page - 1) * per_page)
.limit(per_page)
.all()
)
total = session.query(ArtifactModel).filter_by(task_id=task_id).count()
pages = math.ceil(total / per_page)
pagination = Pagination(
total_items=total,
total_pages=pages,
current_page=page,
page_size=per_page,
)
return [
Artifact(
artifact_id=str(artifact.artifact_id),
@@ -273,4 +356,4 @@ class AgentDB:
uri=artifact.uri,
)
for artifact in artifacts
]
], pagination
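For clarity, a minimal sketch of the offset/limit arithmetic shared by the three list methods above (numbers are illustrative):
import math

total, per_page, page = 42, 10, 3
total_pages = math.ceil(total / per_page)  # 5 pages in total
offset = (page - 1) * per_page  # skip 20 rows, so rows 21-30 come back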

@@ -46,9 +46,9 @@ async def test_create_task():
async def test_create_and_get_task():
db_name = "sqlite:///test_db.sqlite3"
agent_db = AgentDB(db_name)
await agent_db.create_task("task_input")
task = await agent_db.get_task(1)
assert task.input == "task_input"
task = await agent_db.create_task("test_input")
fetched_task = await agent_db.get_task(task.task_id)
assert fetched_task.input == "test_input"
os.remove(db_name.split("///")[1])
@@ -76,9 +76,9 @@ async def test_get_task_not_found():
async def test_create_and_get_step():
db_name = "sqlite:///test_db.sqlite3"
agent_db = AgentDB(db_name)
await agent_db.create_task("task_input")
await agent_db.create_step(1, "step_name")
step = await agent_db.get_step(1, 1)
task = await agent_db.create_task("task_input")
step = await agent_db.create_step(task.task_id, "step_name")
step = await agent_db.get_step(task.task_id, step.step_id)
assert step.name == "step_name"
os.remove(db_name.split("///")[1])
@@ -111,7 +111,7 @@ async def test_get_artifact():
db = AgentDB(db_name)
# Given: A task and its corresponding artifact
task = await db.create_task("test_input")
task = await db.create_task("test_input debug")
step = await db.create_step(task.task_id, "step_name")
# Create an artifact
@@ -124,10 +124,10 @@ async def test_get_artifact():
)
# When: The artifact is fetched by its ID
fetched_artifact = await db.get_artifact(int(task.task_id), artifact.artifact_id)
fetched_artifact = await db.get_artifact(task.task_id, artifact.artifact_id)
# Then: The fetched artifact matches the original
assert fetched_artifact.file_name == "sample_file.txt"
assert fetched_artifact.artifact_id == artifact.artifact_id
assert fetched_artifact.uri == "file:///path/to/sample_file.txt"
os.remove(db_name.split("///")[1])
@@ -143,7 +143,7 @@ async def test_list_tasks():
task2 = await db.create_task("test_input_2")
# When: All tasks are fetched
fetched_tasks = await db.list_tasks()
fetched_tasks, pagination = await db.list_tasks()
# Then: The fetched tasks list includes the created tasks
task_ids = [task.task_id for task in fetched_tasks]
@@ -163,10 +163,10 @@ async def test_list_steps():
step2 = await db.create_step(task.task_id, "step_2")
# When: All steps for the task are fetched
fetched_steps = await db.list_steps(task.task_id)
fetched_steps, pagination = await db.list_steps(task.task_id)
# Then: The fetched steps list includes the created steps
step_ids = [step.step_id for step in fetched_steps]
assert str(step1.step_id) in step_ids
assert str(step2.step_id) in step_ids
assert step1.step_id in step_ids
assert step2.step_id in step_ids
os.remove(db_name.split("///")[1])

204 autogpt/forge_log.py Normal file

@@ -0,0 +1,204 @@
import json
import logging
import logging.config
import logging.handlers
import os
import queue
ENABLE_TRACING = os.environ.get("ENABLE_TRACING", "false").lower() == "true"
JSON_LOGGING = os.environ.get("JSON_LOGGING", "false").lower() == "true"
CHAT = 29
logging.addLevelName(CHAT, "CHAT")
RESET_SEQ: str = "\033[0m"
COLOR_SEQ: str = "\033[1;%dm"
BOLD_SEQ: str = "\033[1m"
UNDERLINE_SEQ: str = "\033[04m"
ORANGE: str = "\033[33m"
YELLOW: str = "\033[93m"
WHITE: str = "\33[37m"
BLUE: str = "\033[34m"
LIGHT_BLUE: str = "\033[94m"
RED: str = "\033[91m"
GREY: str = "\33[90m"
GREEN: str = "\033[92m"
EMOJIS: dict[str, str] = {
"DEBUG": "🐛",
"INFO": "📝",
"CHAT": "💬",
"WARNING": "⚠️",
"ERROR": "",
"CRITICAL": "💥",
}
KEYWORD_COLORS: dict[str, str] = {
"DEBUG": WHITE,
"INFO": LIGHT_BLUE,
"CHAT": GREEN,
"WARNING": YELLOW,
"ERROR": ORANGE,
"CRITICAL": RED,
}
class JsonFormatter(logging.Formatter):
def format(self, record):
return json.dumps(record.__dict__)
def formatter_message(message: str, use_color: bool = True) -> str:
"""
Syntax highlight certain keywords
"""
if use_color:
message = message.replace("$RESET", RESET_SEQ).replace("$BOLD", BOLD_SEQ)
else:
message = message.replace("$RESET", "").replace("$BOLD", "")
return message
def format_word(
message: str, word: str, color_seq: str, bold: bool = False, underline: bool = False
) -> str:
"""
Surround the given word with a sequence
"""
replacer = color_seq + word + RESET_SEQ
if underline:
replacer = UNDERLINE_SEQ + replacer
if bold:
replacer = BOLD_SEQ + replacer
return message.replace(word, replacer)
class ConsoleFormatter(logging.Formatter):
"""
This Formatter simply colors in the levelname, i.e. 'INFO', 'DEBUG'
"""
def __init__(
self, fmt: str, datefmt: str = None, style: str = "%", use_color: bool = True
):
super().__init__(fmt, datefmt, style)
self.use_color = use_color
def format(self, record: logging.LogRecord) -> str:
"""
Format and highlight certain keywords
"""
rec = record
levelname = rec.levelname
if self.use_color and levelname in KEYWORD_COLORS:
levelname_color = KEYWORD_COLORS[levelname] + levelname + RESET_SEQ
rec.levelname = levelname_color
rec.name = f"{GREY}{rec.name:<15}{RESET_SEQ}"
rec.msg = (
KEYWORD_COLORS[levelname] + EMOJIS[levelname] + " " + rec.msg + RESET_SEQ
)
return logging.Formatter.format(self, rec)
class CustomLogger(logging.Logger):
"""
This adds extra logging functions such as logger.chat and also
sets the logger to use the custom formatter
"""
CONSOLE_FORMAT: str = (
"[%(asctime)s] [$BOLD%(name)-15s$RESET] [%(levelname)-8s]\t%(message)s"
)
FORMAT: str = "%(asctime)s %(name)-15s %(levelname)-8s %(message)s"
COLOR_FORMAT: str = formatter_message(CONSOLE_FORMAT, True)
JSON_FORMAT: str = '{"time": "%(asctime)s", "name": "%(name)s", "level": "%(levelname)s", "message": "%(message)s"}'
def __init__(self, name: str, logLevel: str = "DEBUG"):
logging.Logger.__init__(self, name, logLevel)
# Queue Handler
queue_handler = logging.handlers.QueueHandler(queue.Queue(-1))
json_formatter = logging.Formatter(self.JSON_FORMAT)
queue_handler.setFormatter(json_formatter)
self.addHandler(queue_handler)
if JSON_LOGGING:
console_formatter = JsonFormatter()
else:
console_formatter = ConsoleFormatter(self.COLOR_FORMAT)
console = logging.StreamHandler()
console.setFormatter(console_formatter)
self.addHandler(console)
def chat(self, role: str, openai_response: dict, messages=None, *args, **kws):
"""
Parse the content, log the message and extract the usage into prometheus metrics
"""
role_emojis = {
"system": "🖥️",
"user": "👤",
"assistant": "🤖",
"function": "⚙️",
}
if self.isEnabledFor(CHAT):
if messages:
for message in messages:
self._log(
CHAT,
f"{role_emojis.get(message['role'], '🔵')}: {message['content']}",
)
else:
response = json.loads(openai_response)
self._log(
CHAT,
f"{role_emojis.get(role, '🔵')}: {response['choices'][0]['message']['content']}",
)
class QueueLogger(logging.Logger):
"""
Custom logger class with queue
"""
def __init__(self, name: str, level: int = logging.NOTSET):
super().__init__(name, level)
queue_handler = logging.handlers.QueueHandler(queue.Queue(-1))
self.addHandler(queue_handler)
logging_config: dict = dict(
version=1,
formatters={
"console": {
"()": ConsoleFormatter,
"format": CustomLogger.COLOR_FORMAT,
},
},
handlers={
"h": {
"class": "logging.StreamHandler",
"formatter": "console",
"level": logging.DEBUG,
},
},
root={
"handlers": ["h"],
"level": logging.WARNING,
},
loggers={
"autogpt": {
"handlers": ["h"],
"level": logging.DEBUG,
"propagate": False,
},
},
)
def setup_logger():
"""
Setup the logger with the specified format
"""
logging.config.dictConfig(logging_config)
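A minimal usage sketch, mirroring how __main__.py wires this module up:
from autogpt.forge_log import CustomLogger, setup_logger

setup_logger()  # applies the dictConfig above
LOG = CustomLogger(__name__)
LOG.info("agent online")  # rendered by ConsoleFormatter with color and emoji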

@@ -22,17 +22,35 @@ the ones that require special attention due to their complexity are:
Developers and contributors should be especially careful when making modifications to these routes to ensure
consistency and correctness in the system's behavior.
"""
from typing import List
from typing import Optional
from fastapi import APIRouter, Request, UploadFile
from fastapi import APIRouter, Query, Request, Response, UploadFile
from fastapi.responses import FileResponse, JSONResponse
from autogpt.schema import Artifact, Step, StepRequestBody, Task, TaskRequestBody
from autogpt.schema import *
from autogpt.tracing import tracing
base_router = APIRouter()
@base_router.get("/", tags=["root"])
async def root():
"""
Root endpoint that returns a welcome message.
"""
return Response(content="Welcome to the Auto-GPT Forge")
@base_router.get("/heartbeat", tags=["server"])
async def check_server_status():
"""
Check if the server is running.
"""
return Response(content="Server is running.", status_code=200)
@base_router.post("/agent/tasks", tags=["agent"], response_model=Task)
@tracing("Creating new task", is_create_task=True)
async def create_agent_task(request: Request, task_request: TaskRequestBody) -> Task:
"""
Creates a new task using the provided TaskRequestBody and returns a Task.
@@ -61,38 +79,61 @@ async def create_agent_task(request: Request, task_request: TaskRequestBody) ->
}
"""
agent = request["agent"]
task_request = await agent.create_task(task_request)
return task_request
if task_request := await agent.create_task(task_request):
return task_request
else:
return Response(content={"error": "Task creation failed"}, status_code=400)
@base_router.get("/agent/tasks", tags=["agent"], response_model=List[str])
async def list_agent_tasks_ids(request: Request) -> List[str]:
@base_router.get("/agent/tasks", tags=["agent"], response_model=TaskListResponse)
async def list_agent_tasks(
request: Request,
page: Optional[int] = Query(1, ge=1),
page_size: Optional[int] = Query(10, ge=1, alias="pageSize"),
) -> TaskListResponse:
"""
Gets a list of all task IDs.
Retrieves a paginated list of all tasks.
Args:
request (Request): FastAPI request object.
page (int, optional): The page number for pagination. Defaults to 1.
page_size (int, optional): The number of tasks per page for pagination. Defaults to 10.
Returns:
List[str]: A list of all task IDs.
TaskListResponse: A response object containing a list of tasks and pagination details.
Example:
Request:
GET /agent/tasks
GET /agent/tasks?page=1&pageSize=10
Response:
[
"50da533e-3904-4401-8a07-c49adf88b5eb",
"b7d3c70a-7266-4b3a-818e-1327679f0117",
...
]
Response (TaskListResponse defined in schema.py):
{
"items": [
{
"input": "Write the word 'Washington' to a .txt file",
"additional_input": null,
"task_id": "50da533e-3904-4401-8a07-c49adf88b5eb",
"artifacts": [],
"steps": []
},
...
],
"pagination": {
"total": 100,
"pages": 10,
"current": 1,
"pageSize": 10
}
}
"""
agent = request["agent"]
return await agent.list_tasks()
return await agent.list_tasks(page, page_size)
@base_router.get("/agent/tasks/{task_id}", tags=["agent"], response_model=Task)
async def get_agent_task(request: Request, task_id: str):
@tracing("Getting task details")
async def get_agent_task(request: Request, task_id: str) -> Task:
"""
Gets the details of a task by ID.
@@ -144,35 +185,62 @@ async def get_agent_task(request: Request, task_id: str):
}
"""
agent = request["agent"]
return await agent.get_task(task_id)
task = await agent.get_task(task_id)
if task:
return task
else:
return Response(content={"error": "Task not found"}, status_code=404)
@base_router.get(
"/agent/tasks/{task_id}/steps", tags=["agent"], response_model=List[str]
"/agent/tasks/{task_id}/steps", tags=["agent"], response_model=TaskStepsListResponse
)
async def list_agent_task_steps(request: Request, task_id: str) -> List[str]:
async def list_agent_task_steps(
request: Request,
task_id: str,
page: Optional[int] = Query(1, ge=1),
page_size: Optional[int] = Query(10, ge=1, alias="pageSize"),
) -> TaskStepsListResponse:
"""
Retrieves a list of step IDs associated with a specific task.
Retrieves a paginated list of steps associated with a specific task.
Args:
request (Request): FastAPI request object.
task_id (str): The ID of the task.
page (int, optional): The page number for pagination. Defaults to 1.
page_size (int, optional): The number of steps per page for pagination. Defaults to 10.
Returns:
List[str]: A list of step IDs.
TaskStepsListResponse: A response object containing a list of steps and pagination details.
Example:
Request:
GET /agent/tasks/50da533e-3904-4401-8a07-c49adf88b5eb/steps
GET /agent/tasks/50da533e-3904-4401-8a07-c49adf88b5eb/steps?page=1&pageSize=10
Response:
["step1_id", "step2_id", ...]
Response (TaskStepsListResponse defined in schema.py):
{
"items": [
{
"task_id": "50da533e-3904-4401-8a07-c49adf88b5eb",
"step_id": "step1_id",
...
},
...
],
"pagination": {
"total": 100,
"pages": 10,
"current": 1,
"pageSize": 10
}
}
"""
agent = request["agent"]
return await agent.list_steps(task_id)
return await agent.list_steps(task_id, page, page_size)
@base_router.post("/agent/tasks/{task_id}/steps", tags=["agent"], response_model=Step)
@tracing("Creating and executing Step")
async def execute_agent_task_step(
request: Request, task_id: str, step: StepRequestBody
) -> Step:
@@ -222,6 +290,7 @@ async def execute_agent_task_step(
@base_router.get(
"/agent/tasks/{task_id}/steps/{step_id}", tags=["agent"], response_model=Step
)
@tracing("Getting Step Details")
async def get_agent_task_step(request: Request, task_id: str, step_id: str) -> Step:
"""
Retrieves the details of a specific step for a given task.
@@ -250,37 +319,56 @@ async def get_agent_task_step(request: Request, task_id: str, step_id: str) -> S
@base_router.get(
"/agent/tasks/{task_id}/artifacts", tags=["agent"], response_model=List[Artifact]
"/agent/tasks/{task_id}/artifacts",
tags=["agent"],
response_model=TaskArtifactsListResponse,
)
async def list_agent_task_artifacts(request: Request, task_id: str) -> List[Artifact]:
@tracing("Listing Task Artifacts")
async def list_agent_task_artifacts(
request: Request,
task_id: str,
page: Optional[int] = Query(1, ge=1),
page_size: Optional[int] = Query(10, ge=1, alias="pageSize"),
) -> TaskArtifactsListResponse:
"""
Retrieves a list of artifacts associated with a specific task.
Retrieves a paginated list of artifacts associated with a specific task.
Args:
request (Request): FastAPI request object.
task_id (str): The ID of the task.
page (int, optional): The page number for pagination. Defaults to 1.
page_size (int, optional): The number of items per page for pagination. Defaults to 10.
Returns:
List[Artifact]: A list of artifacts.
TaskArtifactsListResponse: A response object containing a list of artifacts and pagination details.
Example:
Request:
GET /agent/tasks/50da533e-3904-4401-8a07-c49adf88b5eb/artifacts
GET /agent/tasks/50da533e-3904-4401-8a07-c49adf88b5eb/artifacts?page=1&pageSize=10
Response:
[
{"artifact_id": "artifact1_id", ...},
{"artifact_id": "artifact2_id", ...},
...
]
Response (TaskArtifactsListResponse defined in schema.py):
{
"items": [
{"artifact_id": "artifact1_id", ...},
{"artifact_id": "artifact2_id", ...},
...
],
"pagination": {
"total": 100,
"pages": 10,
"current": 1,
"pageSize": 10
}
}
"""
agent = request["agent"]
return await agent.list_artifacts(task_id)
return await agent.list_artifacts(task_id, page, page_size)
@base_router.post(
"/agent/tasks/{task_id}/artifacts", tags=["agent"], response_model=Artifact
)
@tracing("Uploading task artifact")
async def upload_agent_task_artifacts(
request: Request,
task_id: str,
@@ -327,6 +415,7 @@ async def upload_agent_task_artifacts(
@base_router.get(
"/agent/tasks/{task_id}/artifacts/{artifact_id}", tags=["agent"], response_model=str
)
@tracing("Downloading task artifact")
async def download_agent_task_artifact(
request: Request, task_id: str, artifact_id: str
) -> FileResponse:
@@ -349,5 +438,4 @@ async def download_agent_task_artifact(
<file_content_of_artifact>
"""
agent = request["agent"]
print(f"task_id: {task_id}, artifact_id: {artifact_id}")
return await agent.get_artifact(task_id, artifact_id)

@@ -1,21 +1,24 @@
# generated by fastapi-codegen:
# filename: ../../openapi.yml
# timestamp: 2023-08-17T11:26:07+00:00
# timestamp: 2023-08-24T13:55:59+00:00
from __future__ import annotations
from enum import Enum
from typing import Any, List, Optional
from typing import List, Optional
from pydantic import BaseModel, Field
class Pagination(BaseModel):
total_items: int = Field(..., description="Total number of items.", example=42)
total_pages: int = Field(..., description="Total number of pages.", example=97)
current_page: int = Field(..., description="Current page number.", example=1)
page_size: int = Field(..., description="Number of items per page.", example=25)
class TaskInput(BaseModel):
__root__: Any = Field(
...,
description="Input parameters for the task. Any value is allowed.",
example='{\n"debug": false,\n"mode": "benchmarks"\n}',
)
pass
class Artifact(BaseModel):
@@ -24,11 +27,8 @@ class Artifact(BaseModel):
description="ID of the artifact.",
example="b225e278-8b4c-4f99-a696-8facf19f0e56",
)
file_name: str = Field(
..., description="Filename of the artifact.", example="main.py"
)
agent_created: Optional[bool] = Field(
None,
agent_created: bool = Field(
...,
description="Whether the artifact has been created by the agent.",
example=False,
)
@@ -39,34 +39,17 @@ class Artifact(BaseModel):
)
class ArtifactUpload(BaseModel):
file: bytes = Field(..., description="File to upload.")
relative_path: Optional[str] = Field(
None,
description="Relative path of the artifact in the agent's workspace.",
example="python/code",
)
class StepInput(BaseModel):
__root__: Any = Field(
...,
description="Input parameters for the task step. Any value is allowed.",
example='{\n"file_to_refactor": "models.py"\n}',
)
pass
class StepOutput(BaseModel):
__root__: Any = Field(
...,
description="Output that the task step has produced. Any value is allowed.",
example='{\n"tokens": 7894,\n"estimated_cost": "0,24$"\n}',
)
pass
class TaskRequestBody(BaseModel):
input: Optional[str] = Field(
None,
input: str = Field(
...,
description="Input prompt for the task.",
example="Write the words you receive to the file 'output.txt'.",
)
@@ -80,7 +63,7 @@ class Task(TaskRequestBody):
example="50da533e-3904-4401-8a07-c49adf88b5eb",
)
artifacts: Optional[List[Artifact]] = Field(
...,
[],
description="A list of artifacts that the task has produced.",
example=[
"7a49f31c-f9c6-4346-a22c-e32bc5af4d8e",
@@ -90,8 +73,8 @@ class Task(TaskRequestBody):
class StepRequestBody(BaseModel):
input: Optional[str] = Field(
None, description="Input prompt for the step.", example="Washington"
input: str = Field(
..., description="Input prompt for the step.", example="Washington"
)
additional_input: Optional[StepInput] = None
@@ -116,7 +99,9 @@ class Step(StepRequestBody):
name: Optional[str] = Field(
None, description="The name of the task step.", example="Write to file"
)
status: Status = Field(..., description="The status of the task step.")
status: Status = Field(
..., description="The status of the task step.", example="created"
)
output: Optional[str] = Field(
None,
description="Output of the task step.",
@@ -127,5 +112,25 @@ class Step(StepRequestBody):
[], description="A list of artifacts that the step has produced."
)
is_last: Optional[bool] = Field(
False, description="Whether this is the last step in the task."
False, description="Whether this is the last step in the task.", example=True
)
class AgentTasksTaskIdArtifactsPostRequest(BaseModel):
file: Optional[bytes] = Field(None, description="File to upload.")
uri: Optional[str] = Field(None, description="URI of the artifact.")
class TaskListResponse(BaseModel):
tasks: Optional[List[Task]] = None
pagination: Optional[Pagination] = None
class TaskStepsListResponse(BaseModel):
steps: Optional[List[Step]] = None
pagination: Optional[Pagination] = None
class TaskArtifactsListResponse(BaseModel):
artifacts: Optional[List[Artifact]] = None
pagination: Optional[Pagination] = None
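A minimal construction sketch for the new paginated envelope (field values are illustrative):
pagination = Pagination(
    total_items=42, total_pages=5, current_page=1, page_size=10
)
response = TaskListResponse(tasks=[], pagination=pagination)
print(response.json())  # {"tasks": [], "pagination": {...}}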

144 autogpt/tracing.py Normal file

@@ -0,0 +1,144 @@
import os
from functools import wraps
from dotenv import load_dotenv
from autogpt.forge_log import CustomLogger
load_dotenv()
ENABLE_TRACING = os.environ.get("ENABLE_TRACING", "false").lower() == "true"
LOG = CustomLogger(__name__)
def setup_tracing(app):
LOG.info(f"Tracing status: {ENABLE_TRACING}")
if ENABLE_TRACING:
from opentelemetry import trace
from opentelemetry.exporter.jaeger.thrift import JaegerExporter
from opentelemetry.sdk.resources import SERVICE_NAME, Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
resource = Resource(attributes={SERVICE_NAME: "Auto-GPT-Forge"})
# Configure the tracer provider to export traces to Jaeger
jaeger_exporter = JaegerExporter(
agent_host_name="jaeger",
agent_port=6831,
)
provider = TracerProvider(resource=resource)
processor = BatchSpanProcessor(jaeger_exporter)
provider.add_span_processor(processor)
trace.set_tracer_provider(provider)
# Instrument FastAPI app
# FastAPIInstrumentor.instrument_app(app)
LOG.info("Tracing Setup")
if ENABLE_TRACING:
from functools import wraps
from opentelemetry import trace
from opentelemetry.trace import NonRecordingSpan
from pydantic import BaseModel
from autogpt.schema import Task
tasks_context_db = {}
class TaskIDTraceContext:
"""Custom context manager to override default tracing behavior."""
def __init__(self, task_id: str, span_name: str):
self.task_id = task_id
self.span_name = span_name
self.span = None
def __enter__(self):
# Get the default tracer
tracer = trace.get_tracer(__name__)
# Check if the task_id has been traced before
if self.task_id in tasks_context_db:
# Get the span context from the previous task
span_context = tasks_context_db[self.task_id].get("span_context")
LOG.info(
f"Task ID: {self.task_id} Span Context trace_id: {span_context.trace_id} span_id: {span_context.span_id}"
)
assert span_context, "No Span context for existing task_id"
# Create a new span linked to the previous span context
ctx = trace.set_span_in_context(NonRecordingSpan(span_context))
self.span = tracer.start_span(self.span_name, context=ctx)
else:
# If it's a new task_id, start a new span
self.span = tracer.start_span(self.span_name)
# Set this span in context and store it for future use
tasks_context_db[self.task_id] = {
"span_context": self.span.get_span_context()
}
return self.span
def __exit__(self, type, value, traceback):
if self.span:
self.span.end()
self.span = None
def tracing(operation_name: str, is_create_task: bool = False):
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
function_ran = False
task_id = "none"
if is_create_task:
result = await func(*args, **kwargs)
if isinstance(result, Task):
task_id = result.task_id
function_ran = True
else:
task_id = kwargs.get("task_id", "none")
step_id = kwargs.get("step_id", "none")
LOG.info(f"Starting Trace for task_id: {task_id}")
with TaskIDTraceContext(task_id, operation_name) as span:
span.set_attribute("task_id", task_id)
span.set_attribute("step_id", step_id)
# Add request event with all kwargs
kwargs_copy = {k: v for k, v in kwargs.items() if k != "request"}
for key, value in kwargs_copy.items():
if isinstance(value, BaseModel):
kwargs_copy[key] = value.json()
span.add_event(name="request", attributes=kwargs_copy)
if not function_ran:
result = await func(*args, **kwargs)
# Convert result to json before adding response event
if isinstance(result, BaseModel):
result_json = result.json()
span.add_event("response", {"response": result_json})
return result
return wrapper
return decorator
else:
def tracing(operation_name: str, is_create_task: bool = False):
"""
Stub decorator that does nothing, so tracing can be toggled with a single global switch.
"""
def decorator(func):
@wraps(func)
async def wrapper(*args, **kwargs):
return await func(*args, **kwargs)
return wrapper
return decorator
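
A short sketch of how this decorator might be attached to agent routes, assuming tracing is imported from autogpt.tracing as defined above; the handler names and Task construction are illustrative, not part of this commit:

    from autogpt.schema import Task
    from autogpt.tracing import tracing

    @tracing("create_agent_task", is_create_task=True)
    async def create_agent_task(request) -> Task:
        # The wrapper runs the handler first, then reads task_id off the returned Task.
        return Task(task_id="1234", input="demo task")

    @tracing("execute_agent_task_step")
    async def execute_agent_task_step(task_id: str, step_id: str):
        # For non-create calls, the wrapper reads task_id/step_id from kwargs, so
        # callers must pass them as keyword arguments for the span attributes to be set.
        ...

Because TaskIDTraceContext keys spans by task_id, every later step of a task is linked back to the span context recorded when that task_id was first seen.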

View File

@@ -12,6 +12,10 @@ from pathlib import Path
import dotenv
from .forge_log import CustomLogger
LOG = CustomLogger(__name__)
dotenv.load_dotenv()
import openai
@@ -37,21 +41,21 @@ def chat_completion_request(
user="TheForge",
)
except Exception as e:
print("Unable to generate ChatCompletion response")
print(f"Exception: {e}")
LOG.info("Unable to generate ChatCompletion response")
LOG.info(f"Exception: {e}")
exit()
def run(task: str):
"""Runs the agent for benchmarking"""
print("Running agent")
LOG.info("Running agent")
steps = plan(task)
execute_plan(steps)
# check for artifacts in workspace
items = glob.glob(os.path.join(workspace, "*"))
if items:
artifacts = []
print(f"Found {len(items)} artifacts in workspace")
LOG.info(f"Found {len(items)} artifacts in workspace")
for item in items:
with open(item, "r") as f:
item_contents = f.read()
@@ -69,7 +73,7 @@ def run(task: str):
def execute_plan(plan: typing.List[str]) -> None:
"""Each step is valid python, join the steps together into a python script and execute it"""
script = "\n".join(plan)
print(f"Executing script: \n{script}")
LOG.info(f"Executing script: \n{script}")
exec(script)
@@ -101,9 +105,9 @@ def plan(task: str) -> typing.List[str]:
Example answer:
{json_format}
"""
response = chat_completion_request(
messages=[{"role": "user", "content": planning_prompt}]
)
messages = [{"role": "user", "content": planning_prompt}]
response = chat_completion_request(messages=messages)
import json
@@ -137,7 +141,7 @@ def write_file(contents: str, filepath: str) -> bool:
f.write(contents)
success = True
except Exception as e:
print(f"Unable to write file: {e}")
LOG.info(f"Unable to write file: {e}")
return success
@@ -150,7 +154,7 @@ def read_file(filepath: str) -> typing.Optional[str]:
with open(filepath, "r") as f:
contents = f.read()
except Exception as e:
print(f"Unable to read file: {e}")
LOG.info(f"Unable to read file: {e}")
return contents
@@ -162,5 +166,11 @@ def read_webpage(url: str) -> typing.Optional[str]:
if response.status_code == 200:
contents = response.text
except Exception as e:
print(f"Unable to read webpage: {e}")
LOG.info(f"Unable to read webpage: {e}")
return contents
if __name__ == "__main__":
test_messages = [{"role": "user", "content": "Hello, how are you?"}]
response = chat_completion_request(test_messages)
LOG.info(response)
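
Since execute_plan joins the steps into one script and exec()s it in this module's namespace, a plan is simply a list of Python statements; a hedged sketch (the step strings are illustrative, not real model output):

    plan_steps = [
        "write_file('hello world', 'output.txt')",
        "LOG.info(read_file('output.txt'))",
    ]
    execute_plan(plan_steps)  # equivalent to exec("\n".join(plan_steps))

Note that exec()ing model-generated code is inherently unsafe outside a sandboxed benchmark environment.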

View File

@@ -3,7 +3,8 @@ import os
import typing
from pathlib import Path
from google.cloud import storage
import aiohttp
from fastapi import Response
class Workspace(abc.ABC):
@@ -12,25 +13,25 @@ class Workspace(abc.ABC):
self.base_path = base_path
@abc.abstractclassmethod
def read(self, path: str) -> bytes:
def read(self, task_id: str, path: str) -> bytes:
pass
@abc.abstractclassmethod
def write(self, path: str, data: bytes) -> None:
def write(self, task_id: str, path: str, data: bytes) -> None:
pass
@abc.abstractclassmethod
def delete(
self, path: str, directory: bool = False, recursive: bool = False
self, task_id: str, path: str, directory: bool = False, recursive: bool = False
) -> None:
pass
@abc.abstractclassmethod
def exists(self, path: str) -> bool:
def exists(self, task_id: str, path: str) -> bool:
pass
@abc.abstractclassmethod
def list(self, path: str) -> typing.List[str]:
def list(self, task_id: str, path: str) -> typing.List[str]:
pass
@@ -38,27 +39,28 @@ class LocalWorkspace(Workspace):
def __init__(self, base_path: str):
self.base_path = Path(base_path).resolve()
def _resolve_path(self, path: str) -> Path:
abs_path = (self.base_path / path).resolve()
def _resolve_path(self, task_id: str, path: str) -> Path:
abs_path = (self.base_path / task_id / path).resolve()
if not str(abs_path).startswith(str(self.base_path)):
raise ValueError("Directory traversal is not allowed!")
(self.base_path / task_id).mkdir(parents=True, exist_ok=True)
return abs_path
def read(self, path: str) -> bytes:
path = self.base_path / path
with open(self._resolve_path(path), "rb") as f:
def read(self, task_id: str, path: str) -> bytes:
path = self.base_path / task_id / path
with open(self._resolve_path(task_id, path), "rb") as f:
return f.read()
def write(self, path: str, data: bytes) -> None:
path = self.base_path / path
with open(self._resolve_path(path), "wb") as f:
def write(self, task_id: str, path: str, data: bytes) -> None:
path = self.base_path / task_id / path
with open(self._resolve_path(task_id, path), "wb") as f:
f.write(data)
def delete(
self, path: str, directory: bool = False, recursive: bool = False
self, task_id: str, path: str, directory: bool = False, recursive: bool = False
) -> None:
path = self.base_path / path
resolved_path = self._resolve_path(path)
path = self.base_path / task_id / path
resolved_path = self._resolve_path(task_id, path)
if directory:
if recursive:
os.rmdir(resolved_path)
@@ -67,61 +69,36 @@ class LocalWorkspace(Workspace):
else:
os.remove(resolved_path)
def exists(self, path: str) -> bool:
path = self.base_path / path
return self._resolve_path(path).exists()
def exists(self, task_id: str, path: str) -> bool:
path = self.base_path / task_id / path
return self._resolve_path(task_id, path).exists()
def list(self, path: str) -> typing.List[str]:
path = self.base_path / path
base = self._resolve_path(path)
return [str(p.relative_to(self.base_path)) for p in base.iterdir()]
def list(self, task_id: str, path: str) -> typing.List[str]:
path = self.base_path / task_id / path
base = self._resolve_path(task_id, path)
return [str(p.relative_to(self.base_path / task_id)) for p in base.iterdir()]
class GCSWorkspace(Workspace):
    def __init__(self, base_path: str, bucket_name: str):
        self.client = storage.Client()
        self.bucket_name = bucket_name
        self.base_path = base_path.strip("/")  # Ensure no trailing or leading slash
    def _resolve_path(self, path: str) -> str:
        resolved = os.path.join(self.base_path, path).strip("/")
        if not resolved.startswith(self.base_path):
            raise ValueError("Directory traversal is not allowed!")
        return resolved
    def read(self, path: str) -> bytes:
        path = self.base_path / path
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.get_blob(self._resolve_path(path))
        return blob.download_as_bytes()
    def write(self, path: str, data: bytes) -> None:
        path = self.base_path / path
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.blob(self._resolve_path(path))
        blob.upload_from_string(data)
    def delete(
        self, path: str, directory: bool = False, recursive: bool = False
    ) -> None:
        path = self.base_path / path
        bucket = self.client.get_bucket(self.bucket_name)
        if directory and recursive:
            # Note: GCS doesn't really have directories, so this will just delete all blobs with the given prefix
            blobs = bucket.list_blobs(prefix=self._resolve_path(path))
            bucket.delete_blobs(blobs)
        else:
            blob = bucket.blob(self._resolve_path(path))
            blob.delete()
    def exists(self, path: str) -> bool:
        path = self.base_path / path
        bucket = self.client.get_bucket(self.bucket_name)
        blob = bucket.blob(self._resolve_path(path))
        return blob.exists()
    def list(self, path: str) -> typing.List[str]:
        path = self.base_path / path
        bucket = self.client.get_bucket(self.bucket_name)
        blobs = bucket.list_blobs(prefix=self._resolve_path(path))
        return [blob.name for blob in blobs]
async def load_from_uri(uri: str, task_id: str, workspace: Workspace) -> bytes:
    """
    Load file from given URI and return its bytes.
    """
    file_path = None
    try:
        if uri.startswith("file://"):
            file_path = uri.split("file://")[1]
            if not workspace.exists(task_id, file_path):
                return Response(status_code=500, content="File not found")
            return workspace.read(task_id, file_path)
        elif uri.startswith("http://") or uri.startswith("https://"):
            async with aiohttp.ClientSession() as session:
                async with session.get(uri) as resp:
                    if resp.status != 200:
                        return Response(
                            status_code=500, content="Unable to load from URL"
                        )
                    return await resp.read()
        else:
            return Response(status_code=500, content="Loading from unsupported uri")
    except Exception as e:
        return Response(status_code=500, content=str(e))
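
A quick sketch of the new task-scoped workspace API, assuming a LocalWorkspace rooted at a temporary directory (the paths are illustrative):

    from autogpt.workspace import LocalWorkspace

    ws = LocalWorkspace("/tmp/demo_workspace")
    ws.write("task-1", "notes.txt", b"hello")  # lands in /tmp/demo_workspace/task-1/notes.txt
    assert ws.exists("task-1", "notes.txt")
    assert not ws.exists("task-2", "notes.txt")  # other tasks cannot see the file
    print(ws.list("task-1", "."))  # ['notes.txt']
    ws.delete("task-1", "notes.txt")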

View File

@@ -8,6 +8,7 @@ from .workspace import LocalWorkspace
# Constants
TEST_BASE_PATH = "/tmp/test_workspace"
TEST_FILE_CONTENT = b"Hello World"
TEST_TASK_ID = "1234"
# Setup and Teardown for LocalWorkspace
@@ -24,23 +25,23 @@ def test_local_read_write_delete_exists(setup_local_workspace):
workspace = LocalWorkspace(TEST_BASE_PATH)
# Write
workspace.write("test_file.txt", TEST_FILE_CONTENT)
workspace.write(TEST_TASK_ID, "test_file.txt", TEST_FILE_CONTENT)
# Exists
assert workspace.exists("test_file.txt")
assert workspace.exists(TEST_TASK_ID, "test_file.txt")
# Read
assert workspace.read("test_file.txt") == TEST_FILE_CONTENT
assert workspace.read(TEST_TASK_ID, "test_file.txt") == TEST_FILE_CONTENT
# Delete
workspace.delete("test_file.txt")
assert not workspace.exists("test_file.txt")
workspace.delete(TEST_TASK_ID, "test_file.txt")
assert not workspace.exists(TEST_TASK_ID, "test_file.txt")
def test_local_list(setup_local_workspace):
workspace = LocalWorkspace(TEST_BASE_PATH)
workspace.write("test1.txt", TEST_FILE_CONTENT)
workspace.write("test2.txt", TEST_FILE_CONTENT)
workspace.write(TEST_TASK_ID, "test1.txt", TEST_FILE_CONTENT)
workspace.write(TEST_TASK_ID, "test2.txt", TEST_FILE_CONTENT)
files = workspace.list(".")
files = workspace.list(TEST_TASK_ID, ".")
assert set(files) == {"test1.txt", "test2.txt"}

25
docker-compose.yml Normal file
View File

@@ -0,0 +1,25 @@
version: '3'
services:
agent:
build:
context: .
dockerfile: Dockerfile
ports:
- 8000:8000
prometheus:
image: prom/prometheus
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
ports:
- 9090:9090
grafana:
image: grafana/grafana
ports:
- 3000:3000
jaeger:
image: jaegertracing/all-in-one
ports:
- 16686:16686
- 14268:14268
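
Once the stack is running, a hedged smoke-test sketch against the default ports; the health paths are the documented defaults for each image, and /metrics assumes the Prometheus instrumentator is exposed on the agent:

    import requests

    endpoints = {
        "agent": "http://localhost:8000/metrics",
        "prometheus": "http://localhost:9090/-/ready",
        "grafana": "http://localhost:3000/api/health",
        "jaeger": "http://localhost:16686/",
    }
    for name, url in endpoints.items():
        print(name, requests.get(url, timeout=5).status_code)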

615
poetry.lock generated
View File

@@ -298,6 +298,24 @@ files = [
{file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
]
[[package]]
name = "asgiref"
version = "3.7.2"
description = "ASGI specs, helper code, and adapters"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"},
{file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"},
]
[package.dependencies]
typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""}
[package.extras]
tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "asttokens"
version = "2.2.1"
@@ -359,6 +377,18 @@ files = [
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]
[[package]]
name = "backoff"
version = "2.2.1"
description = "Function decoration for backoff and retry"
category = "dev"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
]
[[package]]
name = "black"
version = "23.7.0"
@@ -630,6 +660,24 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "colorlog"
version = "6.7.0"
description = "Add colours to the output of Python's logging module."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "colorlog-6.7.0-py2.py3-none-any.whl", hash = "sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662"},
{file = "colorlog-6.7.0.tar.gz", hash = "sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
development = ["black", "flake8", "mypy", "pytest", "types-colorama"]
[[package]]
name = "contourpy"
version = "1.1.0"
@@ -801,6 +849,24 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "deprecated"
version = "1.2.14"
description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
{file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
{file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
]
[package.dependencies]
wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
[[package]]
name = "distlib"
version = "0.3.7"
@@ -1315,21 +1381,21 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"]
[[package]]
name = "googleapis-common-protos"
version = "1.60.0"
version = "1.56.2"
description = "Common protobufs used in Google APIs"
category = "main"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.6"
files = [
{file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"},
{file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"},
{file = "googleapis-common-protos-1.56.2.tar.gz", hash = "sha256:b09b56f5463070c2153753ef123f07d2e49235e89148e9b2459ec8ed2f68d7d3"},
{file = "googleapis_common_protos-1.56.2-py2.py3-none-any.whl", hash = "sha256:023eaea9d8c1cceccd9587c6af6c20f33eeeb05d4148670f2b0322dc1511700c"},
]
[package.dependencies]
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
protobuf = ">=3.15.0,<4.0.0dev"
[package.extras]
grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
grpc = ["grpcio (>=1.0.0,<2.0.0dev)"]
[[package]]
name = "gradio"
@@ -1471,6 +1537,64 @@ files = [
docs = ["Sphinx", "docutils (<0.18)"]
test = ["objgraph", "psutil"]
[[package]]
name = "grpcio"
version = "1.57.0"
description = "HTTP/2-based RPC framework"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "grpcio-1.57.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6"},
{file = "grpcio-1.57.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649"},
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f"},
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a"},
{file = "grpcio-1.57.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019"},
{file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527"},
{file = "grpcio-1.57.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca"},
{file = "grpcio-1.57.0-cp310-cp310-win32.whl", hash = "sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb"},
{file = "grpcio-1.57.0-cp310-cp310-win_amd64.whl", hash = "sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56"},
{file = "grpcio-1.57.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652"},
{file = "grpcio-1.57.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e"},
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e"},
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b"},
{file = "grpcio-1.57.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e"},
{file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d"},
{file = "grpcio-1.57.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a"},
{file = "grpcio-1.57.0-cp311-cp311-win32.whl", hash = "sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe"},
{file = "grpcio-1.57.0-cp311-cp311-win_amd64.whl", hash = "sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0"},
{file = "grpcio-1.57.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf"},
{file = "grpcio-1.57.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5"},
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2"},
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73"},
{file = "grpcio-1.57.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0"},
{file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc"},
{file = "grpcio-1.57.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2"},
{file = "grpcio-1.57.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98"},
{file = "grpcio-1.57.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51"},
{file = "grpcio-1.57.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0"},
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6"},
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941"},
{file = "grpcio-1.57.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f"},
{file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06"},
{file = "grpcio-1.57.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79"},
{file = "grpcio-1.57.0-cp38-cp38-win32.whl", hash = "sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb"},
{file = "grpcio-1.57.0-cp38-cp38-win_amd64.whl", hash = "sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16"},
{file = "grpcio-1.57.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b"},
{file = "grpcio-1.57.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6"},
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b"},
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e"},
{file = "grpcio-1.57.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5"},
{file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9"},
{file = "grpcio-1.57.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37"},
{file = "grpcio-1.57.0-cp39-cp39-win32.whl", hash = "sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766"},
{file = "grpcio-1.57.0-cp39-cp39-win_amd64.whl", hash = "sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056"},
{file = "grpcio-1.57.0.tar.gz", hash = "sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613"},
]
[package.extras]
protobuf = ["grpcio-tools (>=1.57.0)"]
[[package]]
name = "h11"
version = "0.14.0"
@@ -1670,6 +1794,26 @@ files = [
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
[[package]]
name = "importlib-metadata"
version = "6.8.0"
description = "Read metadata from Python packages"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
{file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
[[package]]
name = "importlib-resources"
version = "6.0.1"
@@ -2441,6 +2585,273 @@ dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "p
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"]
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]
[[package]]
name = "opentelemetry-api"
version = "1.19.0"
description = "OpenTelemetry Python API"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"},
{file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"},
]
[package.dependencies]
deprecated = ">=1.2.6"
importlib-metadata = ">=6.0,<7.0"
[[package]]
name = "opentelemetry-exporter-jaeger"
version = "1.19.0"
description = "Jaeger Exporters for OpenTelemetry"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_jaeger-1.19.0-py3-none-any.whl", hash = "sha256:a7235a386eaa89bd866967c95dfd15ee5fd216cf313c50b9f5ebc27ee8ec4c76"},
{file = "opentelemetry_exporter_jaeger-1.19.0.tar.gz", hash = "sha256:feffaf67e1d9cead104f255d159ea412822f48a7b0aa4617090ab91b6d07de7e"},
]
[package.dependencies]
opentelemetry-exporter-jaeger-proto-grpc = "1.19.0"
opentelemetry-exporter-jaeger-thrift = "1.19.0"
[[package]]
name = "opentelemetry-exporter-jaeger-proto-grpc"
version = "1.19.0"
description = "Jaeger Protobuf Exporter for OpenTelemetry"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_jaeger_proto_grpc-1.19.0-py3-none-any.whl", hash = "sha256:702b71c2b15b0d6fc261de69ec5793b99300bc5986be57517d00ad4b3148be21"},
{file = "opentelemetry_exporter_jaeger_proto_grpc-1.19.0.tar.gz", hash = "sha256:2d7d64dd8b4ae95f55dba549efb24229ee0c6cbed4d4f6cc16bda69ec4ce8203"},
]
[package.dependencies]
googleapis-common-protos = ">=1.52,<1.56.3"
grpcio = ">=1.0.0,<2.0.0"
opentelemetry-api = ">=1.3,<2.0"
opentelemetry-sdk = ">=1.11,<2.0"
[[package]]
name = "opentelemetry-exporter-jaeger-thrift"
version = "1.19.0"
description = "Jaeger Thrift Exporter for OpenTelemetry"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_jaeger_thrift-1.19.0-py3-none-any.whl", hash = "sha256:ed22f3c3dab822e5bf7e190302b51eafccce9dd58d43226fc6546676076acecc"},
{file = "opentelemetry_exporter_jaeger_thrift-1.19.0.tar.gz", hash = "sha256:f73daebd2cfb630e391098b11d1aa0d8bcaadb504197d2ecf6dfa8708c7c48ac"},
]
[package.dependencies]
opentelemetry-api = ">=1.3,<2.0"
opentelemetry-sdk = ">=1.11,<2.0"
thrift = ">=0.10.0"
[[package]]
name = "opentelemetry-exporter-otlp"
version = "1.19.0"
description = "OpenTelemetry Collector Exporters"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_otlp-1.19.0-py3-none-any.whl", hash = "sha256:5316ffc754d5a4fb4faaa811a837593edd680bed429fd7c10d898e45f4946903"},
{file = "opentelemetry_exporter_otlp-1.19.0.tar.gz", hash = "sha256:2d4b066180452b8e74a0a598d98901769148a0fe17900c70aca37d3c7c4e8aaa"},
]
[package.dependencies]
opentelemetry-exporter-otlp-proto-grpc = "1.19.0"
opentelemetry-exporter-otlp-proto-http = "1.19.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
version = "1.19.0"
description = "OpenTelemetry Protobuf encoding"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_otlp_proto_common-1.19.0-py3-none-any.whl", hash = "sha256:792d5496ecfebaf4f56b2c434c5e2823f88ffaeb5dcd272ea423b249fd52bded"},
{file = "opentelemetry_exporter_otlp_proto_common-1.19.0.tar.gz", hash = "sha256:c13d02a31dec161f8910d96db6b58309af17d92b827c64284bf85eec3f2d7297"},
]
[package.dependencies]
opentelemetry-proto = "1.19.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
version = "1.19.0"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_otlp_proto_grpc-1.19.0-py3-none-any.whl", hash = "sha256:ae2a6484d12ba4d0f1096c4565193c1c27a951a9d2b10a9a84da3e1f866e84d6"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.19.0.tar.gz", hash = "sha256:e69261b4da8cbaa42d9b5f1cff4fcebbf8a3c02f85d69a8aea698312084f4180"},
]
[package.dependencies]
backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""}
deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
grpcio = ">=1.0.0,<2.0.0"
opentelemetry-api = ">=1.15,<2.0"
opentelemetry-exporter-otlp-proto-common = "1.19.0"
opentelemetry-proto = "1.19.0"
opentelemetry-sdk = ">=1.19.0,<1.20.0"
[package.extras]
test = ["pytest-grpc"]
[[package]]
name = "opentelemetry-exporter-otlp-proto-http"
version = "1.19.0"
description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_exporter_otlp_proto_http-1.19.0-py3-none-any.whl", hash = "sha256:4e050ca57819519a3cb8a6b17feac0d3b4b115796674b6a26295036ae941e11a"},
{file = "opentelemetry_exporter_otlp_proto_http-1.19.0.tar.gz", hash = "sha256:7c8d2c1268cef4d9c1b13a1399f510e3c3dbb88c52f54597d487a128a23b681e"},
]
[package.dependencies]
backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""}
deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
opentelemetry-api = ">=1.15,<2.0"
opentelemetry-exporter-otlp-proto-common = "1.19.0"
opentelemetry-proto = "1.19.0"
opentelemetry-sdk = ">=1.19.0,<1.20.0"
requests = ">=2.7,<3.0"
[package.extras]
test = ["responses (==0.22.0)"]
[[package]]
name = "opentelemetry-instrumentation"
version = "0.40b0"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_instrumentation-0.40b0-py3-none-any.whl", hash = "sha256:789d3726e698aa9526dd247b461b9172f99a4345571546c4aecf40279679fc8e"},
{file = "opentelemetry_instrumentation-0.40b0.tar.gz", hash = "sha256:08bebe6a752514ed61e901e9fee5ccf06ae7533074442e707d75bb65f3e0aa17"},
]
[package.dependencies]
opentelemetry-api = ">=1.4,<2.0"
setuptools = ">=16.0"
wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-instrumentation-asgi"
version = "0.40b0"
description = "ASGI instrumentation for OpenTelemetry"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_instrumentation_asgi-0.40b0-py3-none-any.whl", hash = "sha256:3fc6940bacaccf2d96c24bd937a46dade28b930df8ec4e1231f612f2a66e4496"},
{file = "opentelemetry_instrumentation_asgi-0.40b0.tar.gz", hash = "sha256:98678ef9e3856746dd52b11b8c6ce258acc70ecb910c9053ad7aaabab8fa71f2"},
]
[package.dependencies]
asgiref = ">=3.0,<4.0"
opentelemetry-api = ">=1.12,<2.0"
opentelemetry-instrumentation = "0.40b0"
opentelemetry-semantic-conventions = "0.40b0"
opentelemetry-util-http = "0.40b0"
[package.extras]
instruments = ["asgiref (>=3.0,<4.0)"]
test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.40b0)"]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
version = "0.40b0"
description = "OpenTelemetry FastAPI Instrumentation"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_instrumentation_fastapi-0.40b0-py3-none-any.whl", hash = "sha256:2d71b41b460ebadc347a87ef6c9595864965745a7752b1e08d064eea327e350e"},
{file = "opentelemetry_instrumentation_fastapi-0.40b0.tar.gz", hash = "sha256:d97276a4bda9155de74b0761cdf52831d22fb93894b644ebba026501aa6f7a94"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
opentelemetry-instrumentation = "0.40b0"
opentelemetry-instrumentation-asgi = "0.40b0"
opentelemetry-semantic-conventions = "0.40b0"
opentelemetry-util-http = "0.40b0"
[package.extras]
instruments = ["fastapi (>=0.58,<1.0)"]
test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.40b0)", "requests (>=2.23,<3.0)"]
[[package]]
name = "opentelemetry-proto"
version = "1.19.0"
description = "OpenTelemetry Python Proto"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_proto-1.19.0-py3-none-any.whl", hash = "sha256:d06391cb5fa0fab111b42d3adf5f22683748745bd8f59ed4acfd93cfd252b960"},
{file = "opentelemetry_proto-1.19.0.tar.gz", hash = "sha256:be53205622d85ecd37ebbf764aed907d87620a45eae638860cb5a778bf900c04"},
]
[package.dependencies]
protobuf = ">=3.19,<5.0"
[[package]]
name = "opentelemetry-sdk"
version = "1.19.0"
description = "OpenTelemetry Python SDK"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_sdk-1.19.0-py3-none-any.whl", hash = "sha256:bb67ad676b1bc671766a40d7fc9d9563854c186fa11f0dc8fa2284e004bd4263"},
{file = "opentelemetry_sdk-1.19.0.tar.gz", hash = "sha256:765928956262c7a7766eaba27127b543fb40ef710499cad075f261f52163a87f"},
]
[package.dependencies]
opentelemetry-api = "1.19.0"
opentelemetry-semantic-conventions = "0.40b0"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.40b0"
description = "OpenTelemetry Semantic Conventions"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_semantic_conventions-0.40b0-py3-none-any.whl", hash = "sha256:7ebbaf86755a0948902e68637e3ae516c50222c30455e55af154ad3ffe283839"},
{file = "opentelemetry_semantic_conventions-0.40b0.tar.gz", hash = "sha256:5a7a491873b15ab7c4907bbfd8737645cc87ca55a0a326c1755d1b928d8a0fae"},
]
[[package]]
name = "opentelemetry-util-http"
version = "0.40b0"
description = "Web util for OpenTelemetry"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "opentelemetry_util_http-0.40b0-py3-none-any.whl", hash = "sha256:7e071110b724a24d70de7441aeb4541bf8495ba5f5c33927e6b806d597379d0f"},
{file = "opentelemetry_util_http-0.40b0.tar.gz", hash = "sha256:47d93efa1bb6c71954a5c6ae29a9546efae77f6875dc6c807a76898e0d478b80"},
]
[[package]]
name = "orjson"
version = "3.9.5"
@@ -2807,6 +3218,37 @@ files = [
{file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"},
]
[[package]]
name = "prometheus-client"
version = "0.17.1"
description = "Python client for the Prometheus monitoring system."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"},
{file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"},
]
[package.extras]
twisted = ["twisted"]
[[package]]
name = "prometheus-fastapi-instrumentator"
version = "6.1.0"
description = "Instrument your FastAPI with Prometheus metrics."
category = "dev"
optional = false
python-versions = ">=3.7.0,<4.0.0"
files = [
{file = "prometheus_fastapi_instrumentator-6.1.0-py3-none-any.whl", hash = "sha256:2279ac1cf5b9566a4c3a07f78c9c5ee19648ed90976ab87d73d672abc1bfa017"},
{file = "prometheus_fastapi_instrumentator-6.1.0.tar.gz", hash = "sha256:1820d7a90389ce100f7d1285495ead388818ae0882e761c1f3e6e62a410bdf13"},
]
[package.dependencies]
fastapi = ">=0.38.1,<1.0.0"
prometheus-client = ">=0.8.0,<1.0.0"
[[package]]
name = "prompt-toolkit"
version = "3.0.39"
@@ -2824,25 +3266,34 @@ wcwidth = "*"
[[package]]
name = "protobuf"
version = "4.24.0"
description = ""
version = "3.20.3"
description = "Protocol Buffers"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"},
{file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"},
{file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"},
{file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"},
{file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"},
{file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"},
{file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"},
{file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"},
{file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"},
{file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"},
{file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"},
{file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"},
{file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"},
{file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"},
{file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"},
{file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"},
{file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"},
{file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"},
{file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"},
{file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"},
{file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"},
{file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"},
{file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"},
{file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"},
{file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"},
{file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"},
{file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"},
{file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"},
{file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"},
{file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"},
{file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"},
{file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"},
{file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"},
{file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"},
{file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"},
]
[[package]]
@@ -3642,6 +4093,25 @@ files = [
[package.extras]
doc = ["reno", "sphinx", "tornado (>=4.5)"]
[[package]]
name = "thrift"
version = "0.16.0"
description = "Python bindings for the Apache Thrift RPC system"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
]
[package.dependencies]
six = ">=1.7.2"
[package.extras]
all = ["tornado (>=4.0)", "twisted"]
tornado = ["tornado (>=4.0)"]
twisted = ["twisted"]
[[package]]
name = "tomli"
version = "2.0.1"
@@ -4019,6 +4489,91 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog"]
[[package]]
name = "wrapt"
version = "1.15.0"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
files = [
{file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
{file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
{file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
{file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
{file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
{file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
{file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
{file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
{file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
{file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
{file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
{file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
{file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
{file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
{file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
{file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
{file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
{file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
{file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
{file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
{file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
{file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
{file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
{file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
{file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
{file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
{file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
{file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
{file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
{file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
{file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
{file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
{file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
{file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
{file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
]
[[package]]
name = "wsproto"
version = "1.2.0"
@@ -4122,7 +4677,23 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
[[package]]
name = "zipp"
version = "3.16.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"},
{file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "74c858097e6642dd28b70bed5c198022f6efd16d930b722e15912986666a2f07"
content-hash = "7b4bac6d11c51a275f9b2094cf26e159f58a975809c83a61ebf4e333911e4ff2"

8
prometheus.yml Normal file
View File

@@ -0,0 +1,8 @@
global:
scrape_interval: 15s # By default, scrape targets every 15 seconds.
evaluation_interval: 15s # Evaluate rules every 15 seconds.
scrape_configs:
- job_name: 'autogpt'
static_configs:
- targets: ['agent:8000']
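
To confirm Prometheus is actually scraping the agent job, one can query its targets API; a sketch that assumes the compose stack above and the requests package:

    import requests

    resp = requests.get("http://localhost:9090/api/v1/targets", timeout=5)
    for target in resp.json()["data"]["activeTargets"]:
        print(target["labels"]["job"], target["health"], target["scrapeUrl"])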

View File

@@ -2,8 +2,8 @@
name = "Auto-GPT-Forge"
version = "0.1.0"
description = ""
authors = []
authors = ["Craig Swift <craigswift13@gmail.com>"]
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
@@ -14,6 +14,8 @@ helicone = "^1.0.6"
tenacity = "^8.2.2"
sqlalchemy = "^2.0.19"
google-cloud-storage = "^2.10.0"
aiohttp = "^3.8.5"
colorlog = "^6.7.0"
[tool.poetry.group.dev.dependencies]
@@ -37,6 +39,15 @@ dash = "^2.11.1"
pandas = "^2.0.3"
dash-bootstrap-components = "^1.4.2"
[tool.poetry.group.monitoring.dependencies]
prometheus-fastapi-instrumentator = "^6.1.0"
opentelemetry-api = "^1.19.0"
opentelemetry-sdk = "^1.19.0"
opentelemetry-exporter-otlp = "^1.19.0"
opentelemetry-instrumentation-fastapi = "^0.40b0"
opentelemetry-exporter-jaeger = "^1.19.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"