Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-09 15:17:59 -05:00)
feat(rnd): Reduce container size remove dep with forge and autogpt (#8040)
* Remove forge and autogpt
* update lock files
* Update build process to reduce image size
* Reduced built image size
* fixed docker compose watch
* Updated logging
* updated env.example
* formatting
* linting issue
* linting not working in github actions..
* trying to get around github action linting issue
* updated version
* sleep for prisma issues
* add exp backoff on connection issues
* updated config based on review comments
* Sorting alphabetical
* updated default config
* updated depends checks
* fixed missing prisma binaries
* remove dead layer
* remove try
* remove dead layer
@@ -1,6 +1,6 @@
import pytest

-from .depends import verify_user, requires_admin_user, requires_user
+from .depends import requires_admin_user, requires_user, verify_user


def test_verify_user_no_payload():
rnd/autogpt_libs/autogpt_libs/logging/__init__.py (new file, 9 lines)
@@ -0,0 +1,9 @@
from .config import configure_logging
from .filters import BelowLevelFilter
from .formatters import FancyConsoleFormatter

__all__ = [
    "configure_logging",
    "BelowLevelFilter",
    "FancyConsoleFormatter",
]
rnd/autogpt_libs/autogpt_libs/logging/config.py (new file, 166 lines)
@@ -0,0 +1,166 @@
"""Logging module for Auto-GPT."""

import logging
import sys
from pathlib import Path

from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

from .filters import BelowLevelFilter
from .formatters import AGPTFormatter, StructuredLoggingFormatter

LOG_DIR = Path(__file__).parent.parent.parent.parent / "logs"
LOG_FILE = "activity.log"
DEBUG_LOG_FILE = "debug.log"
ERROR_LOG_FILE = "error.log"

SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(title)s%(message)s"

DEBUG_LOG_FORMAT = (
    "%(asctime)s %(levelname)s %(filename)s:%(lineno)d" " %(title)s%(message)s"
)


class LoggingConfig(BaseSettings):
    level: str = Field(
        default="INFO",
        description="Logging level",
        validation_alias="LOG_LEVEL",
    )

    enable_cloud_logging: bool = Field(
        default=False,
        description="Enable logging to Google Cloud Logging",
    )

    enable_file_logging: bool = Field(
        default=False,
        description="Enable logging to file",
    )
    # File output
    log_dir: Path = Field(
        default=LOG_DIR,
        description="Log directory",
    )

    model_config = SettingsConfigDict(
        env_prefix="",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    @field_validator("level", mode="before")
    @classmethod
    def parse_log_level(cls, v):
        if isinstance(v, str):
            v = v.upper()
            if v not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
                raise ValueError(f"Invalid log level: {v}")
            return v
        return v


def configure_logging(force_cloud_logging: bool = False) -> None:
    """Configure the native logging module based on the LoggingConfig settings.

    This function sets up logging handlers and formatters according to the
    configuration specified in the LoggingConfig object. It supports various
    logging outputs including console, file, cloud, and JSON logging.

    The function uses the LoggingConfig object to determine which logging
    features to enable and how to configure them. This includes setting
    log levels, log formats, and output destinations.

    No arguments are required as the function creates its own LoggingConfig
    instance internally.

    Note: This function is typically called at the start of the application
    to set up the logging infrastructure.
    """

    config = LoggingConfig()

    log_handlers: list[logging.Handler] = []

    # Cloud logging setup
    if config.enable_cloud_logging or force_cloud_logging:
        import google.cloud.logging
        from google.cloud.logging.handlers import CloudLoggingHandler
        from google.cloud.logging_v2.handlers.transports.sync import SyncTransport

        client = google.cloud.logging.Client()
        cloud_handler = CloudLoggingHandler(
            client,
            name="autogpt_logs",
            transport=SyncTransport,
        )
        cloud_handler.setLevel(config.level)
        cloud_handler.setFormatter(StructuredLoggingFormatter())
        log_handlers.append(cloud_handler)
        print("Cloud logging enabled")
    else:
        # Console output handlers
        stdout = logging.StreamHandler(stream=sys.stdout)
        stdout.setLevel(config.level)
        stdout.addFilter(BelowLevelFilter(logging.WARNING))
        if config.level == logging.DEBUG:
            stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        stderr = logging.StreamHandler()
        stderr.setLevel(logging.WARNING)
        if config.level == logging.DEBUG:
            stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        log_handlers += [stdout, stderr]
        print("Console logging enabled")

    # File logging setup
    if config.enable_file_logging:
        # create log directory if it doesn't exist
        if not config.log_dir.exists():
            config.log_dir.mkdir(parents=True, exist_ok=True)

        print(f"Log directory: {config.log_dir}")

        # Activity log handler (INFO and above)
        activity_log_handler = logging.FileHandler(
            config.log_dir / LOG_FILE, "a", "utf-8"
        )
        activity_log_handler.setLevel(config.level)
        activity_log_handler.setFormatter(
            AGPTFormatter(SIMPLE_LOG_FORMAT, no_color=True)
        )
        log_handlers.append(activity_log_handler)

        if config.level == logging.DEBUG:
            # Debug log handler (all levels)
            debug_log_handler = logging.FileHandler(
                config.log_dir / DEBUG_LOG_FILE, "a", "utf-8"
            )
            debug_log_handler.setLevel(logging.DEBUG)
            debug_log_handler.setFormatter(
                AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True)
            )
            log_handlers.append(debug_log_handler)

        # Error log handler (ERROR and above)
        error_log_handler = logging.FileHandler(
            config.log_dir / ERROR_LOG_FILE, "a", "utf-8"
        )
        error_log_handler.setLevel(logging.ERROR)
        error_log_handler.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True))
        log_handlers.append(error_log_handler)
        print("File logging enabled")

    # Configure the root logger
    logging.basicConfig(
        format=DEBUG_LOG_FORMAT if config.level == logging.DEBUG else SIMPLE_LOG_FORMAT,
        level=config.level,
        handlers=log_handlers,
    )
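For reference, a minimal usage sketch of the new module (assumptions: the package is importable as autogpt_libs.logging, and the env var names match the env.example hunk further down; not part of the diff). One caveat worth flagging: LoggingConfig.level holds a level-name string such as "INFO", while logging.DEBUG is the integer 10, so the config.level == logging.DEBUG comparisons above look like they can never be true; setLevel(config.level) still works because the logging module accepts level names.

# Usage sketch (illustrative, not from the diff)
import logging
import os

os.environ["LOG_LEVEL"] = "INFO"
os.environ["ENABLE_FILE_LOGGING"] = "true"

from autogpt_libs.logging import configure_logging

configure_logging()  # builds its own LoggingConfig() from the environment

logger = logging.getLogger(__name__)
logger.info("routed to stdout and activity.log")
logger.error("routed to stderr and error.log")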
rnd/autogpt_libs/autogpt_libs/logging/filters.py (new file, 12 lines)
@@ -0,0 +1,12 @@
import logging


class BelowLevelFilter(logging.Filter):
    """Filter for logging levels below a certain threshold."""

    def __init__(self, below_level: int):
        super().__init__()
        self.below_level = below_level

    def filter(self, record: logging.LogRecord):
        return record.levelno < self.below_level
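A minimal sketch of the stdout/stderr split this filter enables (the same wiring configure_logging() performs above; illustrative, not part of the diff):

# Sketch: INFO and below go to stdout, WARNING and above to stderr,
# so no record is emitted on both streams.
import logging
import sys

from autogpt_libs.logging.filters import BelowLevelFilter

stdout = logging.StreamHandler(sys.stdout)
stdout.addFilter(BelowLevelFilter(logging.WARNING))  # drop WARNING and above

stderr = logging.StreamHandler(sys.stderr)
stderr.setLevel(logging.WARNING)  # keep WARNING and above only

logging.basicConfig(level=logging.INFO, handlers=[stdout, stderr])
logging.getLogger(__name__).info("stdout only")
logging.getLogger(__name__).warning("stderr only")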
rnd/autogpt_libs/autogpt_libs/logging/formatters.py (new file, 95 lines)
@@ -0,0 +1,95 @@
import logging

from colorama import Fore, Style
from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler

from .utils import remove_color_codes


class FancyConsoleFormatter(logging.Formatter):
    """
    A custom logging formatter designed for console output.

    This formatter enhances the standard logging output with color coding. The color
    coding is based on the level of the log message, making it easier to distinguish
    between different types of messages in the console output.

    The color for each level is defined in the LEVEL_COLOR_MAP class attribute.
    """

    # level -> (level & text color, title color)
    LEVEL_COLOR_MAP = {
        logging.DEBUG: Fore.LIGHTBLACK_EX,
        logging.INFO: Fore.BLUE,
        logging.WARNING: Fore.YELLOW,
        logging.ERROR: Fore.RED,
        logging.CRITICAL: Fore.RED + Style.BRIGHT,
    }

    def format(self, record: logging.LogRecord) -> str:
        # Make sure `msg` is a string
        if not hasattr(record, "msg"):
            record.msg = ""
        elif type(record.msg) is not str:
            record.msg = str(record.msg)

        # Determine default color based on error level
        level_color = ""
        if record.levelno in self.LEVEL_COLOR_MAP:
            level_color = self.LEVEL_COLOR_MAP[record.levelno]
            record.levelname = f"{level_color}{record.levelname}{Style.RESET_ALL}"

        # Determine color for message
        color = getattr(record, "color", level_color)
        color_is_specified = hasattr(record, "color")

        # Don't color INFO messages unless the color is explicitly specified.
        if color and (record.levelno != logging.INFO or color_is_specified):
            record.msg = f"{color}{record.msg}{Style.RESET_ALL}"

        return super().format(record)


class AGPTFormatter(FancyConsoleFormatter):
    def __init__(self, *args, no_color: bool = False, **kwargs):
        super().__init__(*args, **kwargs)
        self.no_color = no_color

    def format(self, record: logging.LogRecord) -> str:
        # Make sure `msg` is a string
        if not hasattr(record, "msg"):
            record.msg = ""
        elif type(record.msg) is not str:
            record.msg = str(record.msg)

        # Strip color from the message to prevent color spoofing
        if record.msg and not getattr(record, "preserve_color", False):
            record.msg = remove_color_codes(record.msg)

        # Determine color for title
        title = getattr(record, "title", "")
        title_color = getattr(record, "title_color", "") or self.LEVEL_COLOR_MAP.get(
            record.levelno, ""
        )
        if title and title_color:
            title = f"{title_color + Style.BRIGHT}{title}{Style.RESET_ALL}"
        # Make sure record.title is set, and padded with a space if not empty
        record.title = f"{title} " if title else ""

        if self.no_color:
            return remove_color_codes(super().format(record))
        else:
            return super().format(record)


class StructuredLoggingFormatter(StructuredLogHandler, logging.Formatter):
    def __init__(self):
        # Set up CloudLoggingFilter to add diagnostic info to the log records
        self.cloud_logging_filter = CloudLoggingFilter()

        # Init StructuredLogHandler
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        self.cloud_logging_filter.filter(record)
        return super().format(record)
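The title, title_color, and color attributes consumed above are supplied through the logging module's extra mechanism; a small sketch (the format string is SIMPLE_LOG_FORMAT from config.py; illustrative, not part of the diff):

# Sketch: extra keys become LogRecord attributes that AGPTFormatter reads.
import logging

from colorama import Fore

from autogpt_libs.logging.formatters import AGPTFormatter

handler = logging.StreamHandler()
handler.setFormatter(AGPTFormatter("%(asctime)s %(levelname)s %(title)s%(message)s"))

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Records without a title are fine: format() defaults record.title to "".
logger.info("8 GB", extra={"title": "Memory:", "title_color": Fore.GREEN})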
rnd/autogpt_libs/autogpt_libs/logging/handlers.py (new file, 14 lines)
@@ -0,0 +1,14 @@
from __future__ import annotations

import json
import logging


class JsonFileHandler(logging.FileHandler):
    def format(self, record: logging.LogRecord) -> str:
        record.json_data = json.loads(record.getMessage())
        return json.dumps(getattr(record, "json_data"), ensure_ascii=False, indent=4)

    def emit(self, record: logging.LogRecord) -> None:
        with open(self.baseFilename, "w", encoding="utf-8") as f:
            f.write(self.format(record))
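Two behaviors worth noting in JsonFileHandler: format() requires the record message to be valid JSON, and emit() opens the file in "w" mode, so each record overwrites the previous one (a snapshot file rather than an append-only log). Usage sketch (illustrative, not part of the diff):

# Sketch: each emit() rewrites snapshot.json in full.
import json
import logging

from autogpt_libs.logging.handlers import JsonFileHandler

logger = logging.getLogger("json_demo")
logger.setLevel(logging.INFO)
logger.addHandler(JsonFileHandler("snapshot.json"))

logger.info(json.dumps({"status": "ok", "blocks": 42}))  # message must be JSON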
rnd/autogpt_libs/autogpt_libs/logging/test_utils.py (new file, 36 lines)
@@ -0,0 +1,36 @@
import pytest

from .utils import remove_color_codes


@pytest.mark.parametrize(
    "raw_text, clean_text",
    [
        (
            "COMMAND = \x1b[36mbrowse_website\x1b[0m "
            "ARGUMENTS = \x1b[36m{'url': 'https://www.google.com',"
            " 'question': 'What is the capital of France?'}\x1b[0m",
            "COMMAND = browse_website "
            "ARGUMENTS = {'url': 'https://www.google.com',"
            " 'question': 'What is the capital of France?'}",
        ),
        (
            "{'Schaue dir meine Projekte auf github () an, als auch meine Webseiten': "
            "'https://github.com/Significant-Gravitas/AutoGPT,"
            " https://discord.gg/autogpt und https://twitter.com/Auto_GPT'}",
            "{'Schaue dir meine Projekte auf github () an, als auch meine Webseiten': "
            "'https://github.com/Significant-Gravitas/AutoGPT,"
            " https://discord.gg/autogpt und https://twitter.com/Auto_GPT'}",
        ),
        ("", ""),
        ("hello", "hello"),
        ("hello\x1B[31m world", "hello world"),
        ("\x1B[36mHello,\x1B[32m World!", "Hello, World!"),
        (
            "\x1B[1m\x1B[31mError:\x1B[0m\x1B[31m file not found",
            "Error: file not found",
        ),
    ],
)
def test_remove_color_codes(raw_text, clean_text):
    assert remove_color_codes(raw_text) == clean_text
rnd/autogpt_libs/autogpt_libs/logging/utils.py (new file, 27 lines)
@@ -0,0 +1,27 @@
import logging
import re
from typing import Any

from colorama import Fore


def remove_color_codes(s: str) -> str:
    return re.sub(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])", "", s)


def fmt_kwargs(kwargs: dict) -> str:
    return ", ".join(f"{n}={repr(v)}" for n, v in kwargs.items())


def print_attribute(
    title: str, value: Any, title_color: str = Fore.GREEN, value_color: str = ""
) -> None:
    logger = logging.getLogger()
    logger.info(
        str(value),
        extra={
            "title": f"{title.rstrip(':')}:",
            "title_color": title_color,
            "color": value_color,
        },
    )
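Quick sketch of the two string helpers above (expected values, not part of the diff):

# remove_color_codes strips ANSI escape sequences; fmt_kwargs renders a
# dict as comma-separated name=repr(value) pairs.
from autogpt_libs.logging.utils import fmt_kwargs, remove_color_codes

assert remove_color_codes("\x1b[36mhello\x1b[0m") == "hello"
assert fmt_kwargs({"url": "https://example.com", "timeout": 30}) == (
    "url='https://example.com', timeout=30"
)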
rnd/autogpt_libs/poetry.lock (generated, 1439 lines changed; diff suppressed because it is too large)
rnd/autogpt_libs/pyproject.toml
@@ -1,19 +1,21 @@
[tool.poetry]
name = "autogpt-libs"
-version = "0.1.0"
+version = "0.2.0"
description = "Shared libraries across NextGen AutoGPT"
authors = ["Aarushi <aarushik93@gmail.com>"]
readme = "README.md"
packages = [{ include = "autogpt_libs" }]

[tool.poetry.dependencies]
-python = ">=3.10,<4.0"
colorama = "^0.4.6"
google-cloud-logging = "^3.8.0"
pydantic = "^2.8.2"
pydantic-settings = "^2.5.2"
pyjwt = "^2.8.0"
+python = ">=3.10,<4.0"
python-dotenv = "^1.0.1"
supabase = "^2.7.2"


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
@@ -51,3 +51,11 @@ SMTP_PASSWORD=
# Medium
MEDIUM_API_KEY=
MEDIUM_AUTHOR_ID=
+
+
+# Logging Configuration
+LOG_LEVEL=INFO
+ENABLE_CLOUD_LOGGING=false
+ENABLE_FILE_LOGGING=false
+# Use to manually set the log directory
+# LOG_DIR=./logs
rnd/autogpt_server/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11-slim-buster AS server_base
+FROM python:3.11-slim-buster AS builder

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -6,16 +6,11 @@ ENV PYTHONUNBUFFERED 1
WORKDIR /app

-# postgresql-client is needed to check if the postgres service is ready for running migrations
+# Install build dependencies
RUN apt-get update \
-    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client \
+    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client git \
    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
-    && tar -zxf git.tar.gz \
-    && cd git-* \
-    && make prefix=/usr all \
-    && make prefix=/usr install
+    && rm -rf /var/lib/apt/lists/*

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
@@ -24,29 +19,48 @@ ENV POETRY_VERSION=1.8.3 \
    PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry

-FROM server_base AS server_dependencies
+# Copy and install dependencies
+COPY rnd/autogpt_libs /app/rnd/autogpt_libs
+COPY rnd/autogpt_server/poetry.lock rnd/autogpt_server/pyproject.toml /app/rnd/autogpt_server/
+WORKDIR /app/rnd/autogpt_server
+RUN poetry config virtualenvs.create false \
+    && poetry install --no-interaction --no-ansi
+
+# Generate Prisma client
+COPY rnd/autogpt_server/schema.prisma ./
+RUN poetry config virtualenvs.create false \
+    && poetry run prisma generate
+
+FROM python:3.11-slim-buster AS server_dependencies
+
+WORKDIR /app
+
+ENV POETRY_VERSION=1.8.3 \
+    POETRY_HOME="/opt/poetry" \
+    POETRY_NO_INTERACTION=1 \
+    POETRY_VIRTUALENVS_CREATE=false \
+    PATH="$POETRY_HOME/bin:$PATH"
+
+# Copy only necessary files from builder
+COPY --from=builder /app /app
+COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
+COPY --from=builder /usr/local/bin /usr/local/bin
+# Copy Prisma binaries
+COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

ENV PATH="/app/.venv/bin:$PATH"

-RUN mkdir -p /app/autogpt
-RUN mkdir -p /app/forge
-RUN mkdir -p /app/rnd/autogpt_libs
-RUN mkdir -p /app/rnd/autogpt_server
-
-COPY autogpt /app/autogpt
-COPY forge /app/forge
-COPY rnd/autogpt_libs /app/rnd/autogpt_libs
-
-COPY rnd/autogpt_server/poetry.lock rnd/autogpt_server/pyproject.toml /app/rnd/autogpt_server/
-
-WORKDIR /app/rnd/autogpt_server
-
-RUN poetry install --no-interaction --no-ansi
-
-FROM server_dependencies AS server_prisma
-
-COPY rnd/autogpt_server/schema.prisma ./
-RUN poetry run prisma generate
-
-FROM server_prisma AS server
+FROM server_dependencies AS server

COPY rnd/autogpt_server /app/rnd/autogpt_server
@@ -1,190 +0,0 @@
from __future__ import annotations

import asyncio
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Iterator

from autogpt.agents.agent import Agent, AgentSettings
from autogpt.app.config import ConfigBuilder
from forge.agent.components import AgentComponent
from forge.agent.protocols import CommandProvider
from forge.command import command
from forge.command.command import Command
from forge.file_storage import FileStorageBackendName, get_storage
from forge.file_storage.base import FileStorage
from forge.llm.providers import MultiProvider
from forge.llm.providers.openai import OpenAICredentials, OpenAIProvider
from forge.llm.providers.schema import ModelProviderName
from forge.models.json_schema import JSONSchema
from pydantic import Field, SecretStr

from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField

if TYPE_CHECKING:
    from autogpt.app.config import AppConfig

logger = logging.getLogger(__name__)


class BlockAgentSettings(AgentSettings):
    enabled_components: list[str] = Field(default_factory=list)


class OutputComponent(CommandProvider):
    def get_commands(self) -> Iterator[Command]:
        yield self.output

    @command(
        parameters={
            "output": JSONSchema(
                type=JSONSchema.Type.STRING,
                description="Output data to be returned.",
                required=True,
            ),
        },
    )
    def output(self, output: str) -> str:
        """Use this to output the result."""
        return output


class BlockAgent(Agent):
    def __init__(
        self,
        settings: BlockAgentSettings,
        llm_provider: MultiProvider,
        file_storage: FileStorage,
        app_config: AppConfig,
    ):
        super().__init__(settings, llm_provider, file_storage, app_config)

        self.output = OutputComponent()

        # Disable components
        for attr_name in list(self.__dict__.keys()):
            attr_value = getattr(self, attr_name)
            if not isinstance(attr_value, AgentComponent):
                continue
            component_name = type(attr_value).__name__
            if (
                component_name != "SystemComponent"
                and component_name not in settings.enabled_components
            ):
                delattr(self, attr_name)


class AutoGPTAgentBlock(Block):
    class Input(BlockSchema):
        task: str = SchemaField(
            description="Task description for the agent.",
            placeholder="Calculate and use Output command",
        )
        input: str = SchemaField(
            description="Input data for the task",
            placeholder="8 + 5",
        )
        openai_api_key: BlockSecret = SecretField(
            key="openai_api_key", description="OpenAI API key"
        )
        enabled_components: list[str] = Field(
            default_factory=lambda: [OutputComponent.__name__],
            description="List of [AgentComponents](https://docs.agpt.co/forge/components/built-in-components/) enabled for the agent.",
        )
        disabled_commands: list[str] = Field(
            default_factory=list,
            description="List of commands from enabled components to disable.",
        )
        fast_mode: bool = Field(
            False,
            description="If true uses fast llm, otherwise uses smart and slow llm.",
        )

    class Output(BlockSchema):
        result: str

    def __init__(self):
        super().__init__(
            id="d2e2ecd2-9ae6-422d-8dfe-ceca500ce6a6",
            description="AutoGPT agent, it utilizes a Large Language Model and enabled components/tools to perform a task.",
            categories={BlockCategory.AI},
            input_schema=AutoGPTAgentBlock.Input,
            output_schema=AutoGPTAgentBlock.Output,
            test_input={
                "task": "Make calculations and use output command to output the result",
                "input": "5 + 3",
                "openai_api_key": "openai_api_key",
                "enabled_components": [OutputComponent.__name__],
                "disabled_commands": ["finish"],
                "fast_mode": True,
            },
            test_output=[
                ("result", "8"),
            ],
            test_mock={
                "get_provider": lambda _: MultiProvider(),
                "get_result": lambda _: "8",
            },
        )

    @staticmethod
    def get_provider(openai_api_key: str) -> MultiProvider:
        # LLM provider
        settings = OpenAIProvider.default_settings.model_copy()
        settings.credentials = OpenAICredentials(api_key=SecretStr(openai_api_key))
        openai_provider = OpenAIProvider(settings=settings)

        multi_provider = MultiProvider()
        # HACK: Add OpenAI provider to the multi provider with api key
        multi_provider._provider_instances[ModelProviderName.OPENAI] = openai_provider

        return multi_provider

    @staticmethod
    def get_result(agent: BlockAgent) -> str:
        error: Exception | None = None

        for tries in range(3):
            try:
                proposal = asyncio.run(agent.propose_action())
                result = asyncio.run(agent.execute(proposal))
                return str(result)
            except Exception as e:
                error = e

        raise error or Exception("Failed to get result")

    def run(self, input_data: Input) -> BlockOutput:
        # Set up configuration
        config = ConfigBuilder.build_config_from_env()
        # Disable commands
        config.disabled_commands.extend(input_data.disabled_commands)

        # Storage
        local = config.file_storage_backend == FileStorageBackendName.LOCAL
        restrict_to_root = not local or config.restrict_to_workspace
        file_storage = get_storage(
            config.file_storage_backend,
            root_path=Path("data"),
            restrict_to_root=restrict_to_root,
        )
        file_storage.initialize()

        # State
        state = BlockAgentSettings(
            agent_id="TemporaryAgentID",
            name="WrappedAgent",
            description="Wrapped agent for the Agent Server.",
            task=f"Your task: {input_data.task}\n" f"Input data: {input_data.input}",
            enabled_components=input_data.enabled_components,
        )
        # Switch big brain mode
        state.config.big_brain = not input_data.fast_mode
        provider = self.get_provider(input_data.openai_api_key.get_secret_value())

        agent = BlockAgent(state, provider, file_storage, config)

        result = self.get_result(agent)

        yield "result", result
@@ -31,7 +31,7 @@ async def connect(call_count=0):
    except Exception as e:
        if call_count <= 5:
            logger.info(f"[Prisma-{conn_id}] Connection failed: {e}. Retrying now..")
-            await asyncio.sleep(call_count)
+            await asyncio.sleep(2**call_count)
            await connect(call_count + 1)
        else:
            raise e
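The one-character change above turns a linear wait (0, 1, 2, ... seconds) into exponential backoff: with call_count starting at 0, the retries sleep 1, 2, 4, 8, 16, and 32 seconds before the error is re-raised. The same pattern in an iterative, standalone form (names are illustrative, not from the diff):

# Sketch: generic exponential backoff for an async connect call.
import asyncio


async def with_backoff(connect, max_retries: int = 5):
    for attempt in range(max_retries + 1):
        try:
            return await connect()
        except Exception:
            if attempt == max_retries:
                raise
            await asyncio.sleep(2**attempt)  # 1s, 2s, 4s, ... between retries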
@@ -1,17 +1,15 @@
import os

-from forge.logging.config import LogFormatName
-

def configure_logging():
    import logging

-    from forge.logging import configure_logging
+    import autogpt_libs.logging.config

    if os.getenv("APP_ENV") != "cloud":
-        configure_logging()
+        autogpt_libs.logging.config.configure_logging(force_cloud_logging=False)
    else:
-        configure_logging(log_format=LogFormatName.STRUCTURED)
+        autogpt_libs.logging.config.configure_logging(force_cloud_logging=True)

    # Silence httpx logger
    logging.getLogger("httpx").setLevel(logging.WARNING)
rnd/autogpt_server/poetry.lock (generated, 4859 lines changed; diff suppressed because it is too large)
rnd/autogpt_server/pyproject.toml
@@ -10,11 +10,9 @@ readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
-agpt = { path = "../../autogpt", develop = true }
aio-pika = "^9.4.3"
anthropic = "^0.25.1"
apscheduler = "^3.10.4"
-autogpt-forge = { path = "../../forge", develop = true }
autogpt-libs = { path = "../autogpt_libs" }
click = "^8.1.7"
croniter = "^2.0.5"
@@ -14,18 +14,18 @@ services:
      - "5432:5432"
    networks:
      - app-network

-  server_base:
+  migrate:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
    image: autogpt_server:latest
-    command: ["echo", "This is a base image and should not be run directly"]
-
-  migrate:
-    image: autogpt_server:latest
-    command: ["sh", "-c", "until pg_isready -h postgres -U agpt_user -d agpt_local; do echo 'Waiting for postgres...'; sleep 2; done; poetry run prisma migrate deploy"]
+    develop:
+      watch:
+        - path: ./
+          target: rnd/autogpt_server/migrate
+          action: rebuild
+    command: ["poetry", "run", "prisma", "migrate", "deploy"]
    depends_on:
      postgres:
        condition: service_healthy
@@ -40,7 +40,6 @@ services:
        timeout: 5s
        retries: 5

  redis:
    image: redis:latest
    command: redis-server --requirepass password
@@ -48,10 +47,18 @@ services:
      - "6379:6379"
    networks:
      - app-network
+    healthcheck:
+      test: ["CMD", "redis-cli", "ping"]
+      interval: 10s
+      timeout: 5s
+      retries: 5

  rest_server:
-    image: autogpt_server:latest
-    command: ["poetry", "run", "rest"]
+    build:
+      context: ../
+      dockerfile: rnd/autogpt_server/Dockerfile
+      target: server
+    command: ["python", "-m", "autogpt_server.rest"]
    develop:
      watch:
        - path: ./
@@ -59,11 +66,11 @@ services:
          action: rebuild
    depends_on:
      redis:
-        condition: service_started
+        condition: service_healthy
      postgres:
        condition: service_healthy
      migrate:
-        condition: service_started
+        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
@@ -79,8 +86,11 @@ services:
    networks:
      - app-network

  executor:
-    image: autogpt_server:latest
-    command: ["poetry", "run", "executor"]
+    build:
+      context: ../
+      dockerfile: rnd/autogpt_server/Dockerfile
+      target: server
+    command: ["python", "-m", "autogpt_server.exec"]
    develop:
      watch:
        - path: ./
@@ -88,11 +98,11 @@ services:
          action: rebuild
    depends_on:
      redis:
-        condition: service_started
+        condition: service_healthy
      postgres:
        condition: service_healthy
      migrate:
-        condition: service_started
+        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
@@ -107,17 +117,23 @@ services:
      - app-network

  websocket_server:
-    image: autogpt_server:latest
-    command: ["poetry", "run", "ws"]
+    build:
+      context: ../
+      dockerfile: rnd/autogpt_server/Dockerfile
+      target: server
+    command: ["python", "-m", "autogpt_server.ws"]
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_server/
          action: rebuild
    depends_on:
-      - postgres
-      - redis
-      - migrate
+      postgres:
+        condition: service_healthy
+      redis:
+        condition: service_healthy
+      migrate:
+        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
@@ -126,7 +142,7 @@ services:
      - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
    ports:
-      - "8001:8000"
+      - "8001:8001"
    networks:
      - app-network
@@ -134,13 +150,20 @@ services:
    build:
      context: ../
      dockerfile: rnd/market/Dockerfile
+    develop:
+      watch:
+        - path: ./
+          target: rnd/market/
+          action: rebuild
    depends_on:
-      - postgres
-      - migrate
+      postgres:
+        condition: service_healthy
+      migrate:
+        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
    ports:
-      - "8015:8015"
+      - "8015:8000"
    networks:
      - app-network
@@ -149,11 +172,20 @@ services:
      context: ../
      dockerfile: rnd/autogpt_builder/Dockerfile
      target: dev
+    develop:
+      watch:
+        - path: ./
+          target: rnd/autogpt_builder/
+          action: rebuild
    depends_on:
-      - postgres
-      - rest_server
-      - websocket_server
-      - migrate
+      postgres:
+        condition: service_healthy
+      rest_server:
+        condition: service_started
+      websocket_server:
+        condition: service_started
+      migrate:
+        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
@@ -163,6 +195,7 @@ services:
      - "3000:3000"
    networks:
      - app-network

networks:
-  app-network:
-    driver: bridge
+  app-network:
+    driver: bridge
rnd/market/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.11-slim-buster AS server_base
+FROM python:3.11-slim-buster AS builder

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -6,18 +6,11 @@ ENV PYTHONUNBUFFERED 1
WORKDIR /app

-# postgresql-client is needed to check if the postgres service is ready for running migrations
-# We need to check if the rest of the packages need to be installed
+# Install build dependencies
RUN apt-get update \
-    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client \
+    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client git \
    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
-    && tar -zxf git.tar.gz \
-    && cd git-* \
-    && make prefix=/usr all \
-    && make prefix=/usr install
-
+    && rm -rf /var/lib/apt/lists/*

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
@@ -26,31 +19,43 @@ ENV POETRY_VERSION=1.8.3 \
    PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry

-FROM server_base AS server_dependencies
+# Copy and install dependencies
+COPY rnd/autogpt_libs /app/rnd/autogpt_libs
+COPY rnd/market/poetry.lock rnd/market/pyproject.toml /app/rnd/market/
+WORKDIR /app/rnd/market
+RUN poetry config virtualenvs.create false \
+    && poetry install --no-interaction --no-ansi
+
+# Generate Prisma client
+COPY rnd/market /app/rnd/market
+RUN poetry config virtualenvs.create false \
+    && poetry run prisma generate
+
+FROM python:3.11-slim-buster AS server_dependencies
+
+WORKDIR /app
+
+# Copy only necessary files from builder
+COPY --from=builder /app /app
+COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
+COPY --from=builder /usr/local/bin /usr/local/bin
+# Copy Prisma binaries
+COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

ENV PATH="/app/.venv/bin:$PATH"

-RUN mkdir -p /app/autogpt
-RUN mkdir -p /app/forge
-RUN mkdir -p /app/rnd/autogpt_libs
-RUN mkdir -p /app/rnd/market
-
-COPY rnd/autogpt_libs /app/rnd/autogpt_libs
-
-COPY rnd/market/poetry.lock rnd/market/pyproject.toml /app/rnd/market/
-COPY rnd/market /app/rnd/market
-
-WORKDIR /app/rnd/market
-
-RUN poetry install --no-interaction --no-ansi
-FROM server_dependencies AS server
+FROM server_dependencies AS server_prisma

+# Need the market/utils/partial_types.py
+COPY rnd/market /app/rnd/market

COPY rnd/market/schema.prisma ./
RUN poetry run prisma generate

+FROM server_prisma AS server

-ENV PORT=8005
ENV DATABASE_URL=""
+ENV PORT=8000

-CMD ["poetry", "run", "app"]
+CMD ["uvicorn", "market.app:app", "--reload"]