feat(platform): Add LLM registry admin API skeleton - Part 4 of 6

Add admin write API endpoints for LLM registry management:
- POST /api/llm/models - Create model
- PATCH /api/llm/models/{slug} - Update model
- DELETE /api/llm/models/{slug} - Delete model
- POST /api/llm/providers - Create provider
- PATCH /api/llm/providers/{name} - Update provider
- DELETE /api/llm/providers/{name} - Delete provider

All endpoints require admin authentication via requires_admin_user.

Request/response models defined in admin_model.py:
- CreateLlmModelRequest, UpdateLlmModelRequest
- CreateLlmProviderRequest, UpdateLlmProviderRequest

Implementation coming in follow-up commits (currently returns 501 Not Implemented).

This builds on:
- PR #12357: Schema foundation
- PR #12359: Registry core
- PR #12371: Public read API
This commit is contained in:
Bentlybro
2026-03-17 17:30:02 +00:00
parent 90a68084eb
commit d52409c853
4 changed files with 213 additions and 2 deletions

View File

@@ -403,6 +403,11 @@ app.include_router(
tags=["v2", "llm"],
prefix="/api",
)
app.include_router(
backend.server.v2.llm.admin_router,
tags=["v2", "llm", "admin"],
prefix="/api",
)
app.mount("/external-api", external_api)

View File

@@ -1,5 +1,6 @@
"""LLM registry public API."""
"""LLM registry API (public + admin)."""
from .admin_routes import router as admin_router
from .routes import router
__all__ = ["router"]
__all__ = ["router", "admin_router"]

View File

@@ -0,0 +1,84 @@
"""Request/response models for LLM registry admin API."""
from typing import Any
from pydantic import BaseModel, Field
class CreateLlmProviderRequest(BaseModel):
    """Payload accepted when registering a new LLM provider.

    ``name`` and ``display_name`` are required; everything else is
    optional provider metadata and default-credential wiring.
    """

    # Required identity fields.
    name: str = Field(..., description="Provider identifier (e.g., 'openai', 'anthropic')")
    display_name: str = Field(..., description="Human-readable provider name")

    # Optional description and default-credential configuration.
    description: str | None = Field(default=None, description="Provider description")
    default_credential_provider: str | None = Field(
        default=None, description="Default credential system identifier"
    )
    default_credential_id: str | None = Field(default=None, description="Default credential ID")
    default_credential_type: str | None = Field(default=None, description="Default credential type")

    # Free-form extras; defaults to an empty dict per instance.
    metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
class UpdateLlmProviderRequest(BaseModel):
    """Partial-update payload for an existing LLM provider.

    Every field is optional; fields left as ``None`` are treated as
    "no change" by the (future) update handler.
    """

    display_name: str | None = Field(default=None, description="Human-readable provider name")
    description: str | None = Field(default=None, description="Provider description")
    default_credential_provider: str | None = Field(
        default=None, description="Default credential system identifier"
    )
    default_credential_id: str | None = Field(default=None, description="Default credential ID")
    default_credential_type: str | None = Field(default=None, description="Default credential type")
    metadata: dict[str, Any] | None = Field(default=None, description="Additional metadata")
class CreateLlmModelRequest(BaseModel):
    """Payload accepted when registering a new LLM model.

    Identity, owning provider, context window, and price tier are
    required; capability flags default to the most conservative value
    (disabled / unsupported).
    """

    # Required identity and ownership.
    slug: str = Field(..., description="Model slug (e.g., 'gpt-4', 'claude-3-opus')")
    display_name: str = Field(..., description="Human-readable model name")
    provider_id: str = Field(..., description="Provider ID (UUID)")

    # Optional descriptive fields.
    description: str | None = Field(default=None, description="Model description")
    creator_id: str | None = Field(default=None, description="Creator ID (UUID)")

    # Token limits: context window is mandatory, output cap is not.
    context_window: int = Field(..., description="Maximum context window in tokens", gt=0)
    max_output_tokens: int | None = Field(
        default=None, description="Maximum output tokens (None if unlimited)", gt=0
    )

    # Pricing bucket, constrained to the three known tiers.
    price_tier: int = Field(
        ..., description="Price tier (1=cheapest, 2=medium, 3=expensive)", ge=1, le=3
    )

    # Visibility and capability flags.
    is_enabled: bool = Field(default=True, description="Whether the model is enabled")
    is_recommended: bool = Field(default=False, description="Whether the model is recommended")
    supports_tools: bool = Field(default=False, description="Supports function calling")
    supports_json_output: bool = Field(default=False, description="Supports JSON output mode")
    supports_reasoning: bool = Field(default=False, description="Supports reasoning mode")
    supports_parallel_tool_calls: bool = Field(
        default=False, description="Supports parallel tool calls"
    )

    # Free-form extension points.
    capabilities: dict[str, Any] = Field(
        default_factory=dict, description="Additional capabilities"
    )
    metadata: dict[str, Any] = Field(default_factory=dict, description="Additional metadata")
class UpdateLlmModelRequest(BaseModel):
    """Partial-update payload for an existing LLM model.

    All fields are optional; ``None`` means "leave unchanged". Note the
    slug and provider are intentionally absent — they identify the model
    and cannot be edited through this request.
    """

    display_name: str | None = Field(default=None, description="Human-readable model name")
    description: str | None = Field(default=None, description="Model description")
    creator_id: str | None = Field(default=None, description="Creator ID (UUID)")

    # Token limits (validated only when provided).
    context_window: int | None = Field(
        default=None, description="Maximum context window in tokens", gt=0
    )
    max_output_tokens: int | None = Field(
        default=None, description="Maximum output tokens (None if unlimited)", gt=0
    )
    price_tier: int | None = Field(
        default=None, description="Price tier (1=cheapest, 2=medium, 3=expensive)", ge=1, le=3
    )

    # Tri-state flags: True/False to set, None to keep the current value.
    is_enabled: bool | None = Field(default=None, description="Whether the model is enabled")
    is_recommended: bool | None = Field(default=None, description="Whether the model is recommended")
    supports_tools: bool | None = Field(default=None, description="Supports function calling")
    supports_json_output: bool | None = Field(default=None, description="Supports JSON output mode")
    supports_reasoning: bool | None = Field(default=None, description="Supports reasoning mode")
    supports_parallel_tool_calls: bool | None = Field(
        default=None, description="Supports parallel tool calls"
    )

    capabilities: dict[str, Any] | None = Field(default=None, description="Additional capabilities")
    metadata: dict[str, Any] | None = Field(default=None, description="Additional metadata")

View File

@@ -0,0 +1,121 @@
"""Admin write API for LLM registry management.
Provides endpoints for creating, updating, and deleting:
- Models
- Providers

Cost, creator, and migration endpoints are planned for follow-up work and
are not yet part of this module.
All endpoints require admin authentication.
"""
from typing import Any
import autogpt_libs.auth
from fastapi import APIRouter, HTTPException, Security, status
from backend.server.v2.llm.admin_model import (
CreateLlmModelRequest,
CreateLlmProviderRequest,
UpdateLlmModelRequest,
UpdateLlmProviderRequest,
)
# Shared router for every admin LLM-registry endpoint in this module;
# the application mounts it under the "/api" prefix.
router = APIRouter()
@router.post(
    "/llm/models",
    status_code=status.HTTP_201_CREATED,
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
)
async def create_model(
    request: CreateLlmModelRequest,
) -> dict[str, Any]:
    """Create a new LLM model (admin only).

    Stub endpoint: always responds 501 until the registry write path
    lands in a follow-up commit.
    """
    # TODO: persist the model via the registry once writes are implemented.
    raise HTTPException(
        detail="Model creation not yet implemented",
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
    )
@router.patch(
    "/llm/models/{slug}",
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
)
async def update_model(
    slug: str,
    request: UpdateLlmModelRequest,
) -> dict[str, Any]:
    """Update an existing LLM model (admin only).

    Stub endpoint: always responds 501 until implemented.
    """
    # TODO: apply the partial update once registry writes exist.
    not_implemented = HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Model update not yet implemented",
    )
    raise not_implemented
@router.delete(
    "/llm/models/{slug}",
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_model(
    slug: str,
) -> None:
    """Delete an LLM model (admin only).

    Stub endpoint: always responds 501 until implemented.
    """
    # TODO: remove the model from the registry once writes exist.
    raise HTTPException(
        detail="Model deletion not yet implemented",
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
    )
@router.post(
    "/llm/providers",
    status_code=status.HTTP_201_CREATED,
    # FIX: this route was missing the admin guard that every other write
    # endpoint in this module declares, despite the docstring's claim that
    # admin authentication is required. Harmless while the handler is a
    # 501 stub, but a privilege hole once the implementation lands.
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
)
async def create_provider(
    request: CreateLlmProviderRequest,
) -> dict[str, Any]:
    """Create a new LLM provider.

    Requires admin authentication.
    """
    # TODO: Implement provider creation
    raise HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Provider creation not yet implemented",
    )
@router.patch(
    "/llm/providers/{name}",
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
)
async def update_provider(
    name: str,
    request: UpdateLlmProviderRequest,
) -> dict[str, Any]:
    """Update an existing LLM provider (admin only).

    Stub endpoint: always responds 501 until implemented.
    """
    # TODO: apply the partial update once registry writes exist.
    not_implemented = HTTPException(
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
        detail="Provider update not yet implemented",
    )
    raise not_implemented
@router.delete(
    "/llm/providers/{name}",
    dependencies=[Security(autogpt_libs.auth.requires_admin_user)],
    status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_provider(
    name: str,
) -> None:
    """Delete an LLM provider (admin only).

    Stub endpoint: always responds 501 until implemented.
    """
    # TODO: remove the provider from the registry once writes exist.
    raise HTTPException(
        detail="Provider deletion not yet implemented",
        status_code=status.HTTP_501_NOT_IMPLEMENTED,
    )