"""Request/response models for LLM registry admin API."""

from typing import Any

from pydantic import BaseModel, Field


class CreateLlmProviderRequest(BaseModel):
    """Payload for registering a new LLM provider."""

    name: str = Field(..., description="Provider identifier (e.g., 'openai', 'anthropic')")
    display_name: str = Field(..., description="Human-readable provider name")
    description: str | None = Field(default=None, description="Provider description")
    default_credential_provider: str | None = Field(
        default=None, description="Default credential system identifier"
    )
    default_credential_id: str | None = Field(default=None, description="Default credential ID")
    default_credential_type: str | None = Field(
        default=None, description="Default credential type"
    )
    metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")


class UpdateLlmProviderRequest(BaseModel):
    """Payload for partially updating an existing LLM provider.

    Every field is optional; omitted fields are left unchanged.
    """

    display_name: str | None = Field(default=None, description="Human-readable provider name")
    description: str | None = Field(default=None, description="Provider description")
    default_credential_provider: str | None = Field(
        default=None, description="Default credential system identifier"
    )
    default_credential_id: str | None = Field(default=None, description="Default credential ID")
    default_credential_type: str | None = Field(
        default=None, description="Default credential type"
    )
    metadata: dict[str, Any] | None = Field(default=None, description="Additional metadata")


class CreateLlmModelRequest(BaseModel):
    """Payload for registering a new LLM model."""

    slug: str = Field(..., description="Model slug (e.g., 'gpt-4', 'claude-3-opus')")
    display_name: str = Field(..., description="Human-readable model name")
    description: str | None = Field(default=None, description="Model description")
    provider_id: str = Field(..., description="Provider ID (UUID)")
    creator_id: str | None = Field(default=None, description="Creator ID (UUID)")
    # Token limits must be strictly positive when supplied.
    context_window: int = Field(..., description="Maximum context window in tokens", gt=0)
    max_output_tokens: int | None = Field(
        default=None, description="Maximum output tokens (None if unlimited)", gt=0
    )
    price_tier: int = Field(
        ..., description="Price tier (1=cheapest, 2=medium, 3=expensive)", ge=1, le=3
    )
    is_enabled: bool = Field(default=True, description="Whether the model is enabled")
    is_recommended: bool = Field(default=False, description="Whether the model is recommended")
    supports_tools: bool = Field(default=False, description="Supports function calling")
    supports_json_output: bool = Field(default=False, description="Supports JSON output mode")
    supports_reasoning: bool = Field(default=False, description="Supports reasoning mode")
    supports_parallel_tool_calls: bool = Field(
        default=False, description="Supports parallel tool calls"
    )
    capabilities: dict[str, Any] = Field(
        default_factory=dict, description="Additional capabilities"
    )
    metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")


class UpdateLlmModelRequest(BaseModel):
    """Payload for partially updating an existing LLM model.

    Every field is optional; omitted fields are left unchanged.
    """

    display_name: str | None = Field(default=None, description="Human-readable model name")
    description: str | None = Field(default=None, description="Model description")
    creator_id: str | None = Field(default=None, description="Creator ID (UUID)")
    context_window: int | None = Field(
        default=None, description="Maximum context window in tokens", gt=0
    )
    max_output_tokens: int | None = Field(
        default=None, description="Maximum output tokens (None if unlimited)", gt=0
    )
    price_tier: int | None = Field(
        default=None, description="Price tier (1=cheapest, 2=medium, 3=expensive)", ge=1, le=3
    )
    is_enabled: bool | None = Field(default=None, description="Whether the model is enabled")
    is_recommended: bool | None = Field(
        default=None, description="Whether the model is recommended"
    )
    supports_tools: bool | None = Field(default=None, description="Supports function calling")
    supports_json_output: bool | None = Field(
        default=None, description="Supports JSON output mode"
    )
    supports_reasoning: bool | None = Field(default=None, description="Supports reasoning mode")
    supports_parallel_tool_calls: bool | None = Field(
        default=None, description="Supports parallel tool calls"
    )
    capabilities: dict[str, Any] | None = Field(
        default=None, description="Additional capabilities"
    )
    metadata: dict[str, Any] | None = Field(default=None, description="Additional metadata")
"""Admin write API for LLM registry management.

Provides endpoints for creating, updating, and deleting:
- Models
- Providers

(Cost, creator, and migration admin endpoints are planned but not yet
exposed by this module.)

All endpoints require admin authentication.
"""

from typing import Any

import autogpt_libs.auth
from fastapi import APIRouter, HTTPException, Security, status

from backend.server.v2.llm.admin_model import (
    CreateLlmModelRequest,
    CreateLlmProviderRequest,
    UpdateLlmModelRequest,
    UpdateLlmModelRequest as _UpdateLlmModelRequest,  # noqa: F401  (re-export stability)
    UpdateLlmProviderRequest,
)

# Attach the admin check once at the router level so EVERY route registered
# here is protected.  The original code repeated the dependency per-route and
# forgot it on POST /llm/providers, leaving provider creation callable
# without admin rights; a router-level dependency makes that omission
# impossible to reintroduce when new routes are added.
router = APIRouter(
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
)


@router.post("/llm/models", status_code=status.HTTP_201_CREATED)
async def create_model(
    request: CreateLlmModelRequest,
) -> dict[str, Any]:
    """Create a new LLM model.

    Requires admin authentication.
    """
    # TODO: Implement model creation
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Model creation not yet implemented",
    )


@router.patch("/llm/models/{slug}")
async def update_model(
    slug: str,
    request: UpdateLlmModelRequest,
) -> dict[str, Any]:
    """Update an existing LLM model.

    Requires admin authentication.
    """
    # TODO: Implement model update
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Model update not yet implemented",
    )


@router.delete("/llm/models/{slug}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_model(
    slug: str,
) -> None:
    """Delete an LLM model.

    Requires admin authentication.
    """
    # TODO: Implement model deletion
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Model deletion not yet implemented",
    )


@router.post("/llm/providers", status_code=status.HTTP_201_CREATED)
async def create_provider(
    request: CreateLlmProviderRequest,
) -> dict[str, Any]:
    """Create a new LLM provider.

    Requires admin authentication (enforced by the router-level dependency;
    the original registration of this route carried no auth dependency).
    """
    # TODO: Implement provider creation
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Provider creation not yet implemented",
    )


@router.patch("/llm/providers/{name}")
async def update_provider(
    name: str,
    request: UpdateLlmProviderRequest,
) -> dict[str, Any]:
    """Update an existing LLM provider.

    Requires admin authentication.
    """
    # TODO: Implement provider update
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Provider update not yet implemented",
    )


@router.delete("/llm/providers/{name}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_provider(
    name: str,
) -> None:
    """Delete an LLM provider.

    Requires admin authentication.
    """
    # TODO: Implement provider deletion
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Provider deletion not yet implemented",
    )