Add LLM model creator support to registry and admin UI

Introduces the LlmModelCreator entity to distinguish model creators (e.g., OpenAI, Meta) from providers, with full CRUD API endpoints, database migration, and Prisma schema updates. Backend and frontend are updated to support associating models with creators, including admin UI for managing creators and selecting them when creating or editing models. Existing models are backfilled with known creators via migration.
This commit is contained in:
Bentlybro
2026-01-05 10:17:00 +00:00
parent 52c7b223df
commit 2e3fc99caa
23 changed files with 1202 additions and 82 deletions

View File

@@ -4,7 +4,7 @@ import logging
import re
import secrets
from abc import ABC
from enum import Enum, EnumMeta
from enum import Enum
from json import JSONDecodeError
from typing import Any, Iterable, List, Literal, Optional
@@ -13,7 +13,8 @@ import ollama
import openai
from anthropic.types import ToolParam
from groq import AsyncGroq
from pydantic import BaseModel, SecretStr
from pydantic import BaseModel, GetCoreSchemaHandler, SecretStr
from pydantic_core import CoreSchema, core_schema
from backend.data import llm_registry
from backend.data.block import (
@@ -88,46 +89,81 @@ def llm_model_schema_extra() -> dict[str, Any]:
return {"options": llm_registry.get_llm_model_schema_options()}
class LlmModelMeta(EnumMeta):
pass
class LlmModelMeta(type):
    """
    Metaclass for LlmModel that enables attribute-style access to dynamic models.

    This allows code like `LlmModel.GPT4O` to work by converting the attribute
    name to a slug format:
    - GPT4O -> gpt-4o
    - GPT4O_MINI -> gpt-4o-mini
    - CLAUDE_3_5_SONNET -> claude-3-5-sonnet
    """

    def __getattr__(cls, name: str):
        # Only invoked when normal class attribute lookup fails, so regular
        # attributes/methods are unaffected.
        # Don't intercept private/dunder attributes (pydantic, copy/pickle and
        # introspection probe for them and rely on AttributeError).
        if name.startswith("_"):
            # Fix: report the actual class name instead of hardcoding
            # 'LlmModel', so subclasses raise an accurate message.
            raise AttributeError(
                f"type object '{cls.__name__}' has no attribute '{name}'"
            )
        # Convert attribute name to slug format:
        # 1. Lowercase: GPT4O -> gpt4o
        # 2. Underscores to hyphens: GPT4O_MINI -> gpt4o-mini
        # 3. Insert hyphen between letter and digit: gpt4o -> gpt-4o
        # NOTE(review): step 3 misfires for slugs whose canonical form keeps a
        # letter+digit pair (e.g. O3_MINI -> "o-3-mini", not "o3-mini"); such
        # models must be constructed directly, e.g. LlmModel("o3-mini").
        slug = name.lower().replace("_", "-")
        slug = re.sub(r"([a-z])(\d)", r"\1-\2", slug)
        return cls(slug)
class LlmModel(str, Enum, metaclass=LlmModelMeta):
class LlmModel(str, metaclass=LlmModelMeta):
"""
Dynamic LLM model enum that accepts any model slug from the registry.
This enum no longer contains hardcoded model values. All models are now
managed via the LLM Registry in the database. The _missing_() method allows
any string value to be used, making it fully dynamic.
For backwards compatibility and type hints, you can still use this enum,
but model slugs should come from the registry, not hardcoded enum members.
Dynamic LLM model type that accepts any model slug from the registry.
This is a string subclass (not an Enum) that allows any model slug value.
All models are managed via the LLM Registry in the database.
Usage:
model = LlmModel("gpt-4o") # Direct construction
model = LlmModel.GPT4O # Attribute access (converted to "gpt-4o")
model.value # Returns the slug string
model.provider # Returns the provider from registry
"""
def __new__(cls, value: str):
    """Construct an LlmModel from a slug string; existing instances pass through."""
    # LlmModel is a str subclass, so an instance already *is* its slug and
    # can be returned unchanged instead of re-wrapping it.
    if isinstance(value, LlmModel):
        return value
    return str.__new__(cls, value)
@classmethod
def _missing_(cls, value):
def __get_pydantic_core_schema__(
cls, source_type: Any, handler: GetCoreSchemaHandler
) -> CoreSchema:
"""
Allow any string value to be used as an LlmModel enum member.
This makes the enum fully dynamic - it accepts any model slug from
the database registry, not just hardcoded values.
Tell Pydantic how to validate LlmModel.
Accepts strings and converts them to LlmModel instances.
"""
if isinstance(value, str):
pseudo_member = str.__new__(cls, value)
pseudo_member._name_ = value.upper().replace("-", "_").replace("/", "_").replace(".", "_")
pseudo_member._value_ = value
return pseudo_member
return super()._missing_(value)
return core_schema.no_info_after_validator_function(
cls, # The validator function (LlmModel constructor)
core_schema.str_schema(), # Accept string input
serialization=core_schema.to_string_ser_schema(), # Serialize as string
)
@property
def value(self) -> str:
    """Return the model slug (for compatibility with enum-style access)."""
    # The instance itself is the slug (str subclass); str(self) hands it
    # back as a plain string, mirroring Enum's `.value` access pattern.
    return str(self)
@classmethod
def default(cls) -> "LlmModel":
    """
    Get the default model from the registry.

    Returns the preferred default model (gpt-4o if available and enabled,
    otherwise the first enabled model from the registry).
    """
    # Imported locally -- presumably to avoid a circular import between this
    # module and the registry at load time; confirm before hoisting.
    from backend.data.llm_registry import get_default_model_slug

    return cls(get_default_model_slug())
@property

View File

@@ -157,6 +157,7 @@ class SmartDecisionMakerBlock(Block):
default_factory=llm.LlmModel.default,
description="The language model to use for answering the prompt.",
advanced=False,
json_schema_extra=llm.llm_model_schema_extra(),
)
credentials: llm.AICredentials = llm.AICredentialsField()
multiple_tool_calls: bool = SchemaField(

View File

@@ -11,6 +11,7 @@ from backend.data.llm_registry import schema_utils
from backend.data.llm_registry.registry import (
RegistryModel,
RegistryModelCost,
RegistryModelCreator,
get_all_model_slugs_for_validation,
get_default_model_slug,
get_dynamic_model_slugs,
@@ -45,6 +46,7 @@ __all__ = [
"ModelMetadata",
"RegistryModel",
"RegistryModelCost",
"RegistryModelCreator",
# Registry functions
"get_all_model_slugs_for_validation",
"get_default_model_slug",

View File

@@ -26,6 +26,18 @@ class RegistryModelCost:
metadata: dict[str, Any]
@dataclass(frozen=True)
class RegistryModelCreator:
    """Creator information for an LLM model.

    Immutable value object attached to a RegistryModel; distinct from the
    provider that hosts/serves the model.
    """

    # Database id of the creator record.
    id: str
    # Machine name/slug (e.g. "openai") -- unique per creator.
    name: str
    # Human-readable name (e.g. "OpenAI").
    display_name: str
    description: str | None
    website_url: str | None
    logo_url: str | None
@dataclass(frozen=True)
class RegistryModel:
"""Represents a model in the LLM registry."""
@@ -39,6 +51,7 @@ class RegistryModel:
provider_display_name: str
is_enabled: bool
costs: tuple[RegistryModelCost, ...] = field(default_factory=tuple)
creator: RegistryModelCreator | None = None
_static_metadata: dict[str, ModelMetadata] = {}
@@ -97,6 +110,7 @@ async def refresh_llm_registry() -> None:
include={
"Provider": True,
"Costs": True,
"Creator": True,
}
)
logger.debug("Found %d LLM model records in database", len(records))
@@ -128,6 +142,18 @@ async def refresh_llm_registry() -> None:
for cost in (record.Costs or [])
)
# Map creator if present
creator = None
if record.Creator:
creator = RegistryModelCreator(
id=record.Creator.id,
name=record.Creator.name,
display_name=record.Creator.displayName,
description=record.Creator.description,
website_url=record.Creator.websiteUrl,
logo_url=record.Creator.logoUrl,
)
dynamic[record.slug] = RegistryModel(
slug=record.slug,
display_name=record.displayName,
@@ -142,6 +168,7 @@ async def refresh_llm_registry() -> None:
),
is_enabled=record.isEnabled,
costs=costs,
creator=creator,
)
_dynamic_models.clear()

View File

@@ -10,6 +10,7 @@ from typing import Any
from backend.data.llm_registry.registry import (
get_all_model_slugs_for_validation,
get_default_model_slug,
get_llm_discriminator_mapping,
get_llm_model_schema_options,
)
@@ -62,6 +63,12 @@ def refresh_llm_model_options(field_schema: dict[str, Any]) -> None:
combined_enum = existing_enum | all_known_slugs
field_schema["enum"] = sorted(combined_enum)
# Set the default value from the registry (gpt-4o if available, else first enabled)
# This ensures new blocks have a sensible default pre-selected
default_slug = get_default_model_slug()
if default_slug:
field_schema["default"] = default_slug
def refresh_llm_discriminator_mapping(field_schema: dict[str, Any]) -> None:
"""

View File

@@ -337,3 +337,131 @@ async def revert_llm_migration(migration_id: str):
status_code=500,
detail="Failed to revert migration",
) from exc
# ============================================================================
# Creator Management Endpoints
# ============================================================================
@router.get(
    "/creators",
    summary="List model creators",
    response_model=llm_model.LlmCreatorsResponse,
)
async def list_llm_creators():
    """
    List all model creators.

    Creators are organizations that create/train models (e.g., OpenAI, Meta, Anthropic).
    This is distinct from providers who host/serve the models (e.g., OpenRouter).
    """
    try:
        # Fetch and wrap in one step; any DB or validation failure maps to 500.
        return llm_model.LlmCreatorsResponse(creators=await llm_db.list_creators())
    except Exception as exc:
        logger.exception("Failed to list creators: %s", exc)
        raise fastapi.HTTPException(
            status_code=500,
            detail="Failed to list creators",
        ) from exc
@router.get(
    "/creators/{creator_id}",
    summary="Get creator details",
    response_model=llm_model.LlmModelCreator,
)
async def get_llm_creator(creator_id: str):
    """Get details of a specific model creator; 404 when the id is unknown."""
    try:
        found = await llm_db.get_creator(creator_id)
        if found is not None:
            return found
        raise fastapi.HTTPException(
            status_code=404, detail=f"Creator '{creator_id}' not found"
        )
    except fastapi.HTTPException:
        # Re-raise the 404 untouched so it isn't swallowed by the 500 handler.
        raise
    except Exception as exc:
        logger.exception("Failed to get creator %s: %s", creator_id, exc)
        raise fastapi.HTTPException(
            status_code=500,
            detail="Failed to get creator",
        ) from exc
@router.post(
    "/creators",
    summary="Create model creator",
    response_model=llm_model.LlmModelCreator,
)
async def create_llm_creator(request: llm_model.UpsertLlmCreatorRequest):
    """
    Create a new model creator.

    A creator represents an organization that creates/trains AI models,
    such as OpenAI, Anthropic, Meta, or Google.
    """
    try:
        creator = await llm_db.upsert_creator(request=request)
        # Presumably refreshes cached registry state so the new creator is
        # visible without a restart -- confirm against _refresh_runtime_state.
        await _refresh_runtime_state()
        logger.info("Created model creator '%s' (%s)", creator.display_name, creator.id)
        return creator
    except Exception as exc:
        # Any failure (DB constraint, refresh, etc.) surfaces as a 500.
        logger.exception("Failed to create creator: %s", exc)
        raise fastapi.HTTPException(
            status_code=500,
            detail="Failed to create creator",
        ) from exc
@router.patch(
    "/creators/{creator_id}",
    summary="Update model creator",
    response_model=llm_model.LlmModelCreator,
)
async def update_llm_creator(
    creator_id: str,
    request: llm_model.UpsertLlmCreatorRequest,
):
    """
    Update an existing model creator.

    Returns 404 when no creator with the given id exists (consistent with the
    GET and DELETE endpoints), instead of letting the underlying database
    update fail and surface as a 500.
    """
    try:
        # Verify existence first so an unknown id yields a clean 404 rather
        # than an opaque 500 from the DB update call.
        existing = await llm_db.get_creator(creator_id)
        if not existing:
            raise fastapi.HTTPException(
                status_code=404, detail=f"Creator '{creator_id}' not found"
            )
        creator = await llm_db.upsert_creator(request=request, creator_id=creator_id)
        await _refresh_runtime_state()
        logger.info("Updated model creator '%s' (%s)", creator.display_name, creator_id)
        return creator
    except fastapi.HTTPException:
        # Keep the 404 intact; don't let the generic handler convert it to 500.
        raise
    except Exception as exc:
        logger.exception("Failed to update creator %s: %s", creator_id, exc)
        raise fastapi.HTTPException(
            status_code=500,
            detail="Failed to update creator",
        ) from exc
@router.delete(
    "/creators/{creator_id}",
    summary="Delete model creator",
    response_model=dict,
)
async def delete_llm_creator(creator_id: str):
    """
    Delete a model creator.

    This will remove the creator association from all models that reference it
    (sets creatorId to NULL), but will not delete the models themselves.
    """
    try:
        await llm_db.delete_creator(creator_id)
        # Presumably refreshes cached registry state so the deletion is
        # visible without a restart -- confirm against _refresh_runtime_state.
        await _refresh_runtime_state()
        logger.info("Deleted model creator '%s'", creator_id)
        return {"success": True, "message": f"Creator '{creator_id}' deleted"}
    except ValueError as exc:
        # llm_db.delete_creator raises ValueError for an unknown id -> 404.
        # This clause must precede the generic Exception handler below.
        logger.warning("Creator deletion validation failed: %s", exc)
        raise fastapi.HTTPException(status_code=404, detail=str(exc)) from exc
    except Exception as exc:
        logger.exception("Failed to delete creator %s: %s", creator_id, exc)
        raise fastapi.HTTPException(
            status_code=500,
            detail="Failed to delete creator",
        ) from exc

View File

@@ -8,6 +8,7 @@ from backend.blocks import load_all_blocks
from backend.blocks.llm import LlmModel
from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema
from backend.data.db import query_raw_with_schema
from backend.data.llm_registry import get_all_model_slugs_for_validation
from backend.integrations.providers import ProviderName
from backend.server.v2.builder.model import (
BlockCategoryResponse,
@@ -22,7 +23,6 @@ from backend.util.cache import cached
from backend.util.models import Pagination
logger = logging.getLogger(__name__)
llm_models = [name.name.lower().replace("_", " ") for name in LlmModel]
_static_counts_cache: dict | None = None
_suggested_blocks: list[BlockInfo] | None = None

View File

@@ -29,17 +29,37 @@ def _map_cost(record: prisma.models.LlmModelCost) -> llm_model.LlmModelCost:
)
def _map_creator(
    record: prisma.models.LlmModelCreator,
) -> llm_model.LlmModelCreator:
    """Translate a prisma LlmModelCreator row into its API representation."""
    # camelCase DB columns map onto the snake_case API schema fields.
    mapped = {
        "id": record.id,
        "name": record.name,
        "display_name": record.displayName,
        "description": record.description,
        "website_url": record.websiteUrl,
        "logo_url": record.logoUrl,
        "metadata": _json_dict(record.metadata),
    }
    return llm_model.LlmModelCreator(**mapped)
def _map_model(record: prisma.models.LlmModel) -> llm_model.LlmModel:
costs = []
if record.Costs:
costs = [_map_cost(cost) for cost in record.Costs]
creator = None
if hasattr(record, "Creator") and record.Creator:
creator = _map_creator(record.Creator)
return llm_model.LlmModel(
id=record.id,
slug=record.slug,
display_name=record.displayName,
description=record.description,
provider_id=record.providerId,
creator_id=record.creatorId,
creator=creator,
context_window=record.contextWindow,
max_output_tokens=record.maxOutputTokens,
is_enabled=record.isEnabled,
@@ -83,7 +103,12 @@ async def list_providers(
"""
if include_models:
model_where = {"isEnabled": True} if enabled_only else None
include = {"Models": {"include": {"Costs": True}, "where": model_where}}
include = {
"Models": {
"include": {"Costs": True, "Creator": True},
"where": model_where,
}
}
else:
include = None
records = await prisma.models.LlmProvider.prisma().find_many(include=include)
@@ -107,16 +132,17 @@ async def upsert_provider(
"supportsParallelTool": request.supports_parallel_tool,
"metadata": request.metadata,
}
include = {"Models": {"include": {"Costs": True, "Creator": True}}}
if provider_id:
record = await prisma.models.LlmProvider.prisma().update(
where={"id": provider_id},
data=data,
include={"Models": {"include": {"Costs": True}}},
include=include,
)
else:
record = await prisma.models.LlmProvider.prisma().create(
data=data,
include={"Models": {"include": {"Costs": True}}},
include=include,
)
return _map_provider(record)
@@ -139,7 +165,7 @@ async def list_models(
records = await prisma.models.LlmModel.prisma().find_many(
where=where if where else None,
include={"Costs": True},
include={"Costs": True, "Creator": True},
)
return [_map_model(record) for record in records]
@@ -166,20 +192,24 @@ def _cost_create_payload(
async def create_model(
request: llm_model.CreateLlmModelRequest,
) -> llm_model.LlmModel:
data: dict[str, Any] = {
"slug": request.slug,
"displayName": request.display_name,
"description": request.description,
"providerId": request.provider_id,
"contextWindow": request.context_window,
"maxOutputTokens": request.max_output_tokens,
"isEnabled": request.is_enabled,
"capabilities": request.capabilities,
"metadata": request.metadata,
"Costs": _cost_create_payload(request.costs),
}
if request.creator_id:
data["creatorId"] = request.creator_id
record = await prisma.models.LlmModel.prisma().create(
data={
"slug": request.slug,
"displayName": request.display_name,
"description": request.description,
"providerId": request.provider_id,
"contextWindow": request.context_window,
"maxOutputTokens": request.max_output_tokens,
"isEnabled": request.is_enabled,
"capabilities": request.capabilities,
"metadata": request.metadata,
"Costs": _cost_create_payload(request.costs),
},
include={"Costs": True},
data=data,
include={"Costs": True, "Creator": True},
)
return _map_model(record)
@@ -205,6 +235,9 @@ async def update_model(
data["metadata"] = request.metadata
if request.provider_id is not None:
data["providerId"] = request.provider_id
if request.creator_id is not None:
# Allow setting to None to remove creator association
data["creatorId"] = request.creator_id if request.creator_id else None
if request.costs is not None:
data["Costs"] = {
"deleteMany": {"llmModelId": model_id},
@@ -214,7 +247,7 @@ async def update_model(
record = await prisma.models.LlmModel.prisma().update(
where={"id": model_id},
data=data,
include={"Costs": True},
include={"Costs": True, "Creator": True},
)
return _map_model(record)
@@ -600,3 +633,72 @@ async def revert_migration(migration_id: str) -> llm_model.RevertMigrationRespon
nodes_reverted=nodes_reverted,
message=message,
)
# ============================================================================
# Creator CRUD operations
# ============================================================================
async def list_creators() -> list[llm_model.LlmModelCreator]:
    """List all LLM model creators, ordered by display name (ascending)."""
    rows = await prisma.models.LlmModelCreator.prisma().find_many(
        order={"displayName": "asc"}
    )
    return list(map(_map_creator, rows))
async def get_creator(creator_id: str) -> llm_model.LlmModelCreator | None:
    """Fetch a single creator by ID; returns None when no record matches."""
    row = await prisma.models.LlmModelCreator.prisma().find_unique(
        where={"id": creator_id}
    )
    if row is None:
        return None
    return _map_creator(row)
async def upsert_creator(
    request: llm_model.UpsertLlmCreatorRequest,
    creator_id: str | None = None,
) -> llm_model.LlmModelCreator:
    """Create a new creator, or update an existing one when `creator_id` is given."""
    # Shared camelCase payload for both the create and update paths.
    payload = {
        "name": request.name,
        "displayName": request.display_name,
        "description": request.description,
        "websiteUrl": request.website_url,
        "logoUrl": request.logo_url,
        "metadata": request.metadata,
    }
    client = prisma.models.LlmModelCreator.prisma()
    if not creator_id:
        record = await client.create(data=payload)
    else:
        record = await client.update(
            where={"id": creator_id},
            data=payload,
        )
    return _map_creator(record)
async def delete_creator(creator_id: str) -> bool:
    """
    Delete a model creator.

    This will set creatorId to NULL on all associated models (due to onDelete: SetNull).

    Args:
        creator_id: UUID of the creator to delete

    Returns:
        True if deleted successfully

    Raises:
        ValueError: If creator not found
    """
    client = prisma.models.LlmModelCreator.prisma()
    # Look the record up first so a missing id raises ValueError (mapped to
    # 404 by the API layer) rather than a raw database error.
    existing = await client.find_unique(where={"id": creator_id})
    if existing is None:
        raise ValueError(f"Creator with id '{creator_id}' not found")
    await client.delete(where={"id": creator_id})
    return True

View File

@@ -17,12 +17,26 @@ class LlmModelCost(pydantic.BaseModel):
metadata: dict[str, Any] = pydantic.Field(default_factory=dict)
class LlmModelCreator(pydantic.BaseModel):
    """Represents the organization that created/trained the model (e.g., OpenAI, Meta)."""

    # Database id of the creator record.
    id: str
    # Machine name/slug (e.g. "openai").
    name: str
    # Human-readable name (e.g. "OpenAI").
    display_name: str
    description: Optional[str] = None
    website_url: Optional[str] = None
    logo_url: Optional[str] = None
    # Free-form extra data; defaults to an empty dict per instance.
    metadata: dict[str, Any] = pydantic.Field(default_factory=dict)
class LlmModel(pydantic.BaseModel):
id: str
slug: str
display_name: str
description: Optional[str] = None
provider_id: str
creator_id: Optional[str] = None
creator: Optional[LlmModelCreator] = None
context_window: int
max_output_tokens: Optional[int] = None
is_enabled: bool = True
@@ -55,6 +69,10 @@ class LlmModelsResponse(pydantic.BaseModel):
models: list[LlmModel]
class LlmCreatorsResponse(pydantic.BaseModel):
    """Response envelope for the list-creators endpoint."""

    creators: list[LlmModelCreator]
class UpsertLlmProviderRequest(pydantic.BaseModel):
name: str
display_name: str
@@ -69,6 +87,15 @@ class UpsertLlmProviderRequest(pydantic.BaseModel):
metadata: dict[str, Any] = pydantic.Field(default_factory=dict)
class UpsertLlmCreatorRequest(pydantic.BaseModel):
    """Payload for creating or updating an LLM model creator."""

    # Machine name/slug (e.g. "openai"); required.
    name: str
    display_name: str
    description: Optional[str] = None
    website_url: Optional[str] = None
    logo_url: Optional[str] = None
    metadata: dict[str, Any] = pydantic.Field(default_factory=dict)
class LlmModelCostInput(pydantic.BaseModel):
unit: prisma.enums.LlmCostUnit = prisma.enums.LlmCostUnit.RUN
credit_cost: int
@@ -84,6 +111,7 @@ class CreateLlmModelRequest(pydantic.BaseModel):
display_name: str
description: Optional[str] = None
provider_id: str
creator_id: Optional[str] = None
context_window: int
max_output_tokens: Optional[int] = None
is_enabled: bool = True
@@ -101,6 +129,7 @@ class UpdateLlmModelRequest(pydantic.BaseModel):
capabilities: Optional[dict[str, Any]] = None
metadata: Optional[dict[str, Any]] = None
provider_id: Optional[str] = None
creator_id: Optional[str] = None
costs: Optional[list[LlmModelCostInput]] = None

View File

@@ -0,0 +1,127 @@
-- Add LlmModelCreator table
-- Creator represents who made/trained the model (e.g., OpenAI, Meta)
-- This is distinct from Provider who hosts/serves the model (e.g., OpenRouter)

-- Create the LlmModelCreator table
CREATE TABLE "LlmModelCreator" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    -- No default: maintained by the Prisma client (@updatedAt); the seed
    -- below supplies it explicitly.
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "name" TEXT NOT NULL,
    "displayName" TEXT NOT NULL,
    "description" TEXT,
    "websiteUrl" TEXT,
    "logoUrl" TEXT,
    "metadata" JSONB NOT NULL DEFAULT '{}',

    CONSTRAINT "LlmModelCreator_pkey" PRIMARY KEY ("id")
);

-- Create unique index on name (creator slugs such as 'openai' must be unique;
-- the seed below relies on this for ON CONFLICT)
CREATE UNIQUE INDEX "LlmModelCreator_name_key" ON "LlmModelCreator"("name");

-- Add creatorId column to LlmModel (nullable: creator association is optional)
ALTER TABLE "LlmModel" ADD COLUMN "creatorId" TEXT;

-- Add foreign key constraint
-- ON DELETE SET NULL: deleting a creator detaches its models instead of
-- deleting them.
ALTER TABLE "LlmModel" ADD CONSTRAINT "LlmModel_creatorId_fkey"
    FOREIGN KEY ("creatorId") REFERENCES "LlmModelCreator"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- Create index on creatorId
CREATE INDEX "LlmModel_creatorId_idx" ON "LlmModel"("creatorId");
-- Seed creators based on known model creators
-- NOTE(review): gen_random_uuid() is built in on PostgreSQL 13+; on older
-- servers it requires the pgcrypto extension -- confirm the minimum supported
-- PG version for this deployment.
-- ON CONFLICT ("name") DO NOTHING makes this seed idempotent on re-runs.
INSERT INTO "LlmModelCreator" ("id", "updatedAt", "name", "displayName", "description", "websiteUrl", "metadata")
VALUES
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'openai', 'OpenAI', 'Creator of GPT models', 'https://openai.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'anthropic', 'Anthropic', 'Creator of Claude models', 'https://anthropic.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'meta', 'Meta', 'Creator of Llama models', 'https://ai.meta.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'google', 'Google', 'Creator of Gemini models', 'https://deepmind.google', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'mistral', 'Mistral AI', 'Creator of Mistral models', 'https://mistral.ai', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'cohere', 'Cohere', 'Creator of Command models', 'https://cohere.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'deepseek', 'DeepSeek', 'Creator of DeepSeek models', 'https://deepseek.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'perplexity', 'Perplexity AI', 'Creator of Sonar models', 'https://perplexity.ai', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'qwen', 'Qwen (Alibaba)', 'Creator of Qwen models', 'https://qwenlm.github.io', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'xai', 'xAI', 'Creator of Grok models', 'https://x.ai', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'amazon', 'Amazon', 'Creator of Nova models', 'https://aws.amazon.com/bedrock', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'microsoft', 'Microsoft', 'Creator of WizardLM models', 'https://microsoft.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'moonshot', 'Moonshot AI', 'Creator of Kimi models', 'https://moonshot.cn', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'nvidia', 'NVIDIA', 'Creator of Nemotron models', 'https://nvidia.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'nous_research', 'Nous Research', 'Creator of Hermes models', 'https://nousresearch.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'vercel', 'Vercel', 'Creator of v0 models', 'https://vercel.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'cognitive_computations', 'Cognitive Computations', 'Creator of Dolphin models', 'https://erichartford.com', '{}'),
    (gen_random_uuid(), CURRENT_TIMESTAMP, 'gryphe', 'Gryphe', 'Creator of MythoMax models', 'https://huggingface.co/Gryphe', '{}')
ON CONFLICT ("name") DO NOTHING;
-- Update existing models with their creators
-- NOTE: LIKE is case-sensitive in PostgreSQL, hence the separate
-- 'llama%' / 'Llama%' and 'qwen/%' / 'Qwen/%' patterns below.
-- Models whose slug matches no pattern simply keep creatorId = NULL.

-- OpenAI models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'openai')
WHERE "slug" LIKE 'gpt-%' OR "slug" LIKE 'o1%' OR "slug" LIKE 'o3%' OR "slug" LIKE 'openai/%';

-- Anthropic models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'anthropic')
WHERE "slug" LIKE 'claude-%';

-- Meta/Llama models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'meta')
WHERE "slug" LIKE 'llama%' OR "slug" LIKE 'Llama%' OR "slug" LIKE 'meta-llama/%' OR "slug" LIKE '%/llama-%';

-- Google models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'google')
WHERE "slug" LIKE 'google/%' OR "slug" LIKE 'gemini%';

-- Mistral models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'mistral')
WHERE "slug" LIKE 'mistral%' OR "slug" LIKE 'mistralai/%';

-- Cohere models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'cohere')
WHERE "slug" LIKE 'cohere/%' OR "slug" LIKE 'command-%';

-- DeepSeek models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'deepseek')
WHERE "slug" LIKE 'deepseek/%' OR "slug" LIKE 'deepseek-%';

-- Perplexity models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'perplexity')
WHERE "slug" LIKE 'perplexity/%' OR "slug" LIKE 'sonar%';

-- Qwen models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'qwen')
WHERE "slug" LIKE 'Qwen/%' OR "slug" LIKE 'qwen/%';

-- xAI/Grok models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'xai')
WHERE "slug" LIKE 'x-ai/%' OR "slug" LIKE 'grok%';

-- Amazon models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'amazon')
WHERE "slug" LIKE 'amazon/%' OR "slug" LIKE 'nova-%';

-- Microsoft models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'microsoft')
WHERE "slug" LIKE 'microsoft/%' OR "slug" LIKE 'wizardlm%';

-- Moonshot models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'moonshot')
WHERE "slug" LIKE 'moonshotai/%' OR "slug" LIKE 'kimi%';

-- NVIDIA models ('%nemotron%' also catches vendor-prefixed nemotron slugs)
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'nvidia')
WHERE "slug" LIKE 'nvidia/%' OR "slug" LIKE '%nemotron%';

-- Nous Research models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'nous_research')
WHERE "slug" LIKE 'nousresearch/%' OR "slug" LIKE 'hermes%';

-- Vercel/v0 models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'vercel')
WHERE "slug" LIKE 'v0-%';

-- Dolphin models (Cognitive Computations / Eric Hartford)
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'cognitive_computations')
WHERE "slug" LIKE 'dolphin-%';

-- Gryphe models
UPDATE "LlmModel" SET "creatorId" = (SELECT "id" FROM "LlmModelCreator" WHERE "name" = 'gryphe')
WHERE "slug" LIKE 'gryphe/%' OR "slug" LIKE 'mythomax%';

View File

@@ -966,6 +966,22 @@ enum LlmCostUnit {
TOKENS
}
// The organization that created/trained a model (e.g., OpenAI, Meta).
// Distinct from LlmProvider, which hosts/serves the model.
model LlmModelCreator {
  id          String   @id @default(uuid())
  createdAt   DateTime @default(now())
  updatedAt   DateTime @updatedAt
  name        String   @unique // e.g., "openai", "anthropic", "meta"
  displayName String // e.g., "OpenAI", "Anthropic", "Meta"
  description String?
  websiteUrl  String? // Link to creator's website
  logoUrl     String? // URL to creator's logo
  metadata    Json     @default("{}")

  // Models attributed to this creator (back-relation of LlmModel.creatorId).
  Models LlmModel[]
}
model LlmProvider {
id String @id @default(uuid())
createdAt DateTime @default(now())
@@ -1001,6 +1017,11 @@ model LlmModel {
providerId String
Provider LlmProvider @relation(fields: [providerId], references: [id], onDelete: Restrict)
// Creator is the organization that created/trained the model (e.g., OpenAI, Meta)
// This is distinct from the provider who hosts/serves the model (e.g., OpenRouter)
creatorId String?
Creator LlmModelCreator? @relation(fields: [creatorId], references: [id], onDelete: SetNull)
contextWindow Int
maxOutputTokens Int?
isEnabled Boolean @default(true)
@@ -1011,6 +1032,7 @@ model LlmModel {
Costs LlmModelCost[]
@@index([providerId, isEnabled])
@@index([creatorId])
@@index([slug])
}

View File

@@ -3,11 +3,13 @@
import BackendApi from "@/lib/autogpt-server-api";
import type {
CreateLlmModelRequest,
LlmCreatorsResponse,
LlmMigrationsResponse,
LlmModelsResponse,
LlmProvidersResponse,
ToggleLlmModelRequest,
UpdateLlmModelRequest,
UpsertLlmCreatorRequest,
UpsertLlmProviderRequest,
} from "@/lib/autogpt-server-api/types";
import { revalidatePath } from "next/cache";
@@ -50,12 +52,13 @@ export async function createLlmProviderAction(formData: FormData) {
export async function createLlmModelAction(formData: FormData) {
const providerId = String(formData.get("provider_id"));
const creatorId = formData.get("creator_id");
// Fetch provider to get default credentials
const api = new BackendApi();
const providersResponse = await api.listAdminLlmProviders(false);
const provider = providersResponse.providers.find((p) => p.id === providerId);
if (!provider) {
throw new Error("Provider not found");
}
@@ -67,6 +70,7 @@ export async function createLlmModelAction(formData: FormData) {
? String(formData.get("description"))
: undefined,
provider_id: providerId,
creator_id: creatorId ? String(creatorId) : undefined,
context_window: Number(formData.get("context_window") || 0),
max_output_tokens: formData.get("max_output_tokens")
? Number(formData.get("max_output_tokens"))
@@ -92,6 +96,8 @@ export async function createLlmModelAction(formData: FormData) {
export async function updateLlmModelAction(formData: FormData) {
const modelId = String(formData.get("model_id"));
const creatorId = formData.get("creator_id");
const payload: UpdateLlmModelRequest = {
display_name: formData.get("display_name")
? String(formData.get("display_name"))
@@ -102,6 +108,7 @@ export async function updateLlmModelAction(formData: FormData) {
provider_id: formData.get("provider_id")
? String(formData.get("provider_id"))
: undefined,
creator_id: creatorId ? String(creatorId) : undefined,
context_window: formData.get("context_window")
? Number(formData.get("context_window"))
: undefined,
@@ -196,3 +203,64 @@ export async function revertLlmMigrationAction(
}
}
// Creator management actions
// Fetch the full list of model creators via the admin API.
export async function fetchLlmCreators(): Promise<LlmCreatorsResponse> {
  const api = new BackendApi();
  const response = await api.listAdminLlmCreators();
  return response;
}
// Server action: create a creator from the admin form and revalidate the page.
export async function createLlmCreatorAction(formData: FormData): Promise<void> {
  // Optional form fields: empty/missing values become undefined.
  const optionalText = (key: string): string | undefined => {
    const raw = formData.get(key);
    return raw ? String(raw) : undefined;
  };
  const optionalTrimmed = (key: string): string | undefined => {
    const raw = formData.get(key);
    return raw ? String(raw).trim() : undefined;
  };

  const payload: UpsertLlmCreatorRequest = {
    name: String(formData.get("name") || "").trim(),
    display_name: String(formData.get("display_name") || "").trim(),
    description: optionalText("description"),
    website_url: optionalTrimmed("website_url"),
    logo_url: optionalTrimmed("logo_url"),
    metadata: {},
  };

  const api = new BackendApi();
  await api.createAdminLlmCreator(payload);
  revalidatePath(ADMIN_LLM_PATH);
}
// Server action: update an existing creator from the admin form.
export async function updateLlmCreatorAction(formData: FormData): Promise<void> {
  const creatorId = String(formData.get("creator_id"));

  // Optional form fields: empty/missing values become undefined.
  const optionalText = (key: string): string | undefined => {
    const raw = formData.get(key);
    return raw ? String(raw) : undefined;
  };
  const optionalTrimmed = (key: string): string | undefined => {
    const raw = formData.get(key);
    return raw ? String(raw).trim() : undefined;
  };

  const payload: UpsertLlmCreatorRequest = {
    name: String(formData.get("name") || "").trim(),
    display_name: String(formData.get("display_name") || "").trim(),
    description: optionalText("description"),
    website_url: optionalTrimmed("website_url"),
    logo_url: optionalTrimmed("logo_url"),
    metadata: {},
  };

  const api = new BackendApi();
  await api.updateAdminLlmCreator(creatorId, payload);
  revalidatePath(ADMIN_LLM_PATH);
}
// Server action: delete a creator and revalidate; rethrows a normalized Error.
export async function deleteLlmCreatorAction(formData: FormData): Promise<void> {
  try {
    const creatorId = String(formData.get("creator_id"));
    await new BackendApi().deleteAdminLlmCreator(creatorId);
    revalidatePath(ADMIN_LLM_PATH);
  } catch (error) {
    console.error("Delete creator error:", error);
    if (error instanceof Error) {
      throw error;
    }
    throw new Error("Failed to delete creator");
  }
}

View File

@@ -0,0 +1,123 @@
"use client";
import { useState } from "react";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { Button } from "@/components/atoms/Button/Button";
import { createLlmCreatorAction } from "../actions";
import { useRouter } from "next/navigation";
/**
 * Admin modal for registering a new LLM model creator (the organization
 * that made/trained a model, e.g. OpenAI or Meta — distinct from the
 * provider who hosts/serves it).
 *
 * Renders its own trigger button; on submit it invokes the
 * `createLlmCreatorAction` server action, closes the dialog, and refreshes
 * the route so the creators table picks up the new entry.
 */
export function AddCreatorModal() {
  // Dialog open state is controlled locally so we can close it after a
  // successful submit.
  const [open, setOpen] = useState(false);
  const router = useRouter();
  return (
    <Dialog
      title="Add Creator"
      controlled={{ isOpen: open, set: setOpen }}
      styling={{ maxWidth: "512px" }}
    >
      <Dialog.Trigger>
        <Button variant="primary" size="small">
          Add Creator
        </Button>
      </Dialog.Trigger>
      <Dialog.Content>
        <div className="mb-4 text-sm text-muted-foreground">
          Add a new model creator (the organization that made/trained the
          model).
        </div>
        <form
          action={async (formData) => {
            // Server action first; only close + refresh once it resolves.
            await createLlmCreatorAction(formData);
            setOpen(false);
            router.refresh();
          }}
          className="space-y-4"
        >
          <div className="grid gap-4 sm:grid-cols-2">
            <div className="space-y-2">
              <label
                htmlFor="name"
                className="text-sm font-medium text-foreground"
              >
                Name (slug) <span className="text-destructive">*</span>
              </label>
              <input
                id="name"
                required
                name="name"
                className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm transition-colors placeholder:text-muted-foreground focus:border-primary focus:outline-none focus:ring-2 focus:ring-primary/20"
                placeholder="openai"
              />
              <p className="text-xs text-muted-foreground">
                Lowercase identifier (e.g., openai, meta, anthropic)
              </p>
            </div>
            <div className="space-y-2">
              <label
                htmlFor="display_name"
                className="text-sm font-medium text-foreground"
              >
                Display Name <span className="text-destructive">*</span>
              </label>
              <input
                id="display_name"
                required
                name="display_name"
                className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm transition-colors placeholder:text-muted-foreground focus:border-primary focus:outline-none focus:ring-2 focus:ring-primary/20"
                placeholder="OpenAI"
              />
            </div>
          </div>
          <div className="space-y-2">
            <label
              htmlFor="description"
              className="text-sm font-medium text-foreground"
            >
              Description
            </label>
            <textarea
              id="description"
              name="description"
              rows={2}
              className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm transition-colors placeholder:text-muted-foreground focus:border-primary focus:outline-none focus:ring-2 focus:ring-primary/20"
              placeholder="Creator of GPT models..."
            />
          </div>
          <div className="space-y-2">
            <label
              htmlFor="website_url"
              className="text-sm font-medium text-foreground"
            >
              Website URL
            </label>
            <input
              id="website_url"
              name="website_url"
              type="url"
              className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm transition-colors placeholder:text-muted-foreground focus:border-primary focus:outline-none focus:ring-2 focus:ring-primary/20"
              placeholder="https://openai.com"
            />
          </div>
          <Dialog.Footer>
            <Button
              variant="ghost"
              size="small"
              onClick={() => setOpen(false)}
              type="button"
            >
              Cancel
            </Button>
            <Button variant="primary" size="small" type="submit">
              Add Creator
            </Button>
          </Dialog.Footer>
        </form>
      </Dialog.Content>
    </Dialog>
  );
}

View File

@@ -3,15 +3,16 @@
import { useState } from "react";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { Button } from "@/components/atoms/Button/Button";
import type { LlmProvider } from "@/lib/autogpt-server-api/types";
import type { LlmProvider, LlmModelCreator } from "@/lib/autogpt-server-api/types";
import { createLlmModelAction } from "../actions";
import { useRouter } from "next/navigation";
interface Props {
providers: LlmProvider[];
creators: LlmModelCreator[];
}
export function AddModelModal({ providers }: Props) {
export function AddModelModal({ providers, creators }: Props) {
const [open, setOpen] = useState(false);
const router = useRouter();
@@ -106,7 +107,7 @@ export function AddModelModal({ providers }: Props) {
Model capabilities and limits
</p>
</div>
<div className="grid gap-4 sm:grid-cols-3">
<div className="grid gap-4 sm:grid-cols-2">
<div className="space-y-2">
<label
htmlFor="provider_id"
@@ -130,7 +131,36 @@ export function AddModelModal({ providers }: Props) {
</option>
))}
</select>
<p className="text-xs text-muted-foreground">
Who hosts/serves the model
</p>
</div>
<div className="space-y-2">
<label
htmlFor="creator_id"
className="text-sm font-medium text-foreground"
>
Creator
</label>
<select
id="creator_id"
name="creator_id"
className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm transition-colors focus:border-primary focus:outline-none focus:ring-2 focus:ring-primary/20"
defaultValue=""
>
<option value="">No creator selected</option>
{creators.map((creator) => (
<option key={creator.id} value={creator.id}>
{creator.display_name} ({creator.name})
</option>
))}
</select>
<p className="text-xs text-muted-foreground">
Who made/trained the model (e.g., OpenAI, Meta)
</p>
</div>
</div>
<div className="grid gap-4 sm:grid-cols-2">
<div className="space-y-2">
<label
htmlFor="context_window"

View File

@@ -0,0 +1,196 @@
"use client";
import { useState } from "react";
import type { LlmModelCreator } from "@/lib/autogpt-server-api/types";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/atoms/Table/Table";
import { Button } from "@/components/atoms/Button/Button";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
deleteLlmCreatorAction,
updateLlmCreatorAction,
} from "../actions";
import { useRouter } from "next/navigation";
/**
 * Resolve a display hostname for a creator's website link.
 *
 * `new URL()` throws on malformed input; falling back to the raw string
 * means one bad URL stored in the registry cannot crash the entire table
 * render.
 */
function safeHostname(url: string): string {
  try {
    return new URL(url).hostname;
  } catch {
    return url;
  }
}

/**
 * Admin table of registered LLM model creators with per-row edit/delete
 * controls. Shows an empty-state panel when no creators exist.
 */
export function CreatorsTable({ creators }: { creators: LlmModelCreator[] }) {
  if (!creators.length) {
    return (
      <div className="rounded-lg border border-dashed border-border p-6 text-center text-sm text-muted-foreground">
        No creators registered yet.
      </div>
    );
  }
  return (
    <div className="rounded-lg border">
      <Table>
        <TableHeader>
          <TableRow>
            <TableHead>Creator</TableHead>
            <TableHead>Description</TableHead>
            <TableHead>Website</TableHead>
            <TableHead>Actions</TableHead>
          </TableRow>
        </TableHeader>
        <TableBody>
          {creators.map((creator) => (
            <TableRow key={creator.id}>
              <TableCell>
                <div className="font-medium">{creator.display_name}</div>
                <div className="text-xs text-muted-foreground">
                  {creator.name}
                </div>
              </TableCell>
              <TableCell>
                <span className="text-sm text-muted-foreground">
                  {creator.description || "—"}
                </span>
              </TableCell>
              <TableCell>
                {creator.website_url ? (
                  <a
                    href={creator.website_url}
                    target="_blank"
                    rel="noopener noreferrer"
                    className="text-sm text-primary hover:underline"
                  >
                    {safeHostname(creator.website_url)}
                  </a>
                ) : (
                  // Match the description column's "—" placeholder instead
                  // of rendering an empty span.
                  <span className="text-muted-foreground">—</span>
                )}
              </TableCell>
              <TableCell>
                <div className="flex items-center justify-end gap-2">
                  <EditCreatorModal creator={creator} />
                  <DeleteCreatorButton creatorId={creator.id} />
                </div>
              </TableCell>
            </TableRow>
          ))}
        </TableBody>
      </Table>
    </div>
  );
}
/**
 * Per-row modal for editing an existing LLM model creator.
 *
 * Pre-fills the form from the given creator and submits through the
 * `updateLlmCreatorAction` server action (the creator id travels in a
 * hidden field). Closes the dialog and refreshes the route on success.
 */
function EditCreatorModal({ creator }: { creator: LlmModelCreator }) {
  // Local open state lets us close the dialog after a successful update.
  const [open, setOpen] = useState(false);
  const router = useRouter();
  return (
    <Dialog
      title="Edit Creator"
      controlled={{ isOpen: open, set: setOpen }}
      styling={{ maxWidth: "512px" }}
    >
      <Dialog.Trigger>
        <Button variant="outline" size="small" className="min-w-0">
          Edit
        </Button>
      </Dialog.Trigger>
      <Dialog.Content>
        <form
          action={async (formData) => {
            // Run the server action first; only close + refresh once done.
            await updateLlmCreatorAction(formData);
            setOpen(false);
            router.refresh();
          }}
          className="space-y-4"
        >
          {/* Carries the id so the server action knows which row to update. */}
          <input type="hidden" name="creator_id" value={creator.id} />
          <div className="grid gap-4 sm:grid-cols-2">
            <div className="space-y-2">
              <label className="text-sm font-medium">Name (slug)</label>
              <input
                required
                name="name"
                defaultValue={creator.name}
                className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm"
              />
            </div>
            <div className="space-y-2">
              <label className="text-sm font-medium">Display Name</label>
              <input
                required
                name="display_name"
                defaultValue={creator.display_name}
                className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm"
              />
            </div>
          </div>
          <div className="space-y-2">
            <label className="text-sm font-medium">Description</label>
            <textarea
              name="description"
              rows={2}
              defaultValue={creator.description ?? ""}
              className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm"
            />
          </div>
          <div className="space-y-2">
            <label className="text-sm font-medium">Website URL</label>
            <input
              name="website_url"
              type="url"
              defaultValue={creator.website_url ?? ""}
              className="w-full rounded-md border border-input bg-background px-3 py-2 text-sm"
            />
          </div>
          <Dialog.Footer>
            <Button
              variant="ghost"
              size="small"
              onClick={() => setOpen(false)}
              type="button"
            >
              Cancel
            </Button>
            <Button variant="primary" size="small" type="submit">
              Update
            </Button>
          </Dialog.Footer>
        </form>
      </Dialog.Content>
    </Dialog>
  );
}
/**
 * Delete button for a creator row, wrapped in a small form so the id can
 * travel to the server action via a hidden field. Asks for confirmation
 * first; on success the route is refreshed so the table reflects the
 * removal.
 */
function DeleteCreatorButton({ creatorId }: { creatorId: string }) {
  const router = useRouter();
  const handleDelete = async (formData: FormData) => {
    // Guard clause: bail out unless the admin confirms.
    const confirmed = confirm(
      "Delete this creator? Models using this creator will have their creator set to none."
    );
    if (!confirmed) {
      return;
    }
    await deleteLlmCreatorAction(formData);
    router.refresh();
  };
  return (
    <form action={handleDelete}>
      <input type="hidden" name="creator_id" value={creatorId} />
      <Button
        type="submit"
        variant="outline"
        size="small"
        className="min-w-0 text-destructive hover:bg-destructive/10"
      >
        Delete
      </Button>
    </form>
  );
}

View File

@@ -3,15 +3,21 @@
import { useState } from "react";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { Button } from "@/components/atoms/Button/Button";
import type { LlmModel, LlmProvider } from "@/lib/autogpt-server-api/types";
import type {
LlmModel,
LlmModelCreator,
LlmProvider,
} from "@/lib/autogpt-server-api/types";
import { updateLlmModelAction } from "../actions";
export function EditModelModal({
model,
providers,
creators,
}: {
model: LlmModel;
providers: LlmProvider[];
creators: LlmModelCreator[];
}) {
const [open, setOpen] = useState(false);
const cost = model.costs[0];
@@ -65,6 +71,30 @@ export function EditModelModal({
</option>
))}
</select>
<span className="text-xs text-muted-foreground">
Who hosts/serves the model
</span>
</label>
</div>
<div className="grid gap-4 md:grid-cols-2">
<label className="text-sm font-medium">
Creator
<select
name="creator_id"
className="mt-1 w-full rounded border border-input bg-background p-2 text-sm"
defaultValue={model.creator_id ?? ""}
>
<option value="">No creator selected</option>
{creators.map((c) => (
<option key={c.id} value={c.id}>
{c.display_name} ({c.name})
</option>
))}
</select>
<span className="text-xs text-muted-foreground">
Who made/trained the model (e.g., OpenAI, Meta)
</span>
</label>
</div>

View File

@@ -2,37 +2,40 @@
import type {
LlmModel,
LlmModelCreator,
LlmModelMigration,
LlmProvider,
} from "@/lib/autogpt-server-api/types";
import { AddProviderModal } from "./AddProviderModal";
import { AddModelModal } from "./AddModelModal";
import { AddCreatorModal } from "./AddCreatorModal";
import { ProviderList } from "./ProviderList";
import { ModelsTable } from "./ModelsTable";
import { MigrationsTable } from "./MigrationsTable";
import { CreatorsTable } from "./CreatorsTable";
interface Props {
providers: LlmProvider[];
models: LlmModel[];
migrations: LlmModelMigration[];
creators: LlmModelCreator[];
}
export function LlmRegistryDashboard({ providers, models, migrations }: Props) {
export function LlmRegistryDashboard({
providers,
models,
migrations,
creators,
}: Props) {
return (
<div className="mx-auto p-6">
<div className="flex flex-col gap-6">
{/* Header */}
<div className="flex items-center justify-between">
<div>
<h1 className="text-3xl font-bold">LLM Registry</h1>
<p className="text-gray-500">
Manage supported providers, models, and credit pricing
</p>
</div>
<div className="flex gap-2">
<AddModelModal providers={providers} />
<AddProviderModal />
</div>
<div>
<h1 className="text-3xl font-bold">LLM Registry</h1>
<p className="text-gray-500">
Manage providers, creators, models, and credit pricing
</p>
</div>
{/* Active Migrations Section - Only show if there are migrations */}
@@ -49,27 +52,50 @@ export function LlmRegistryDashboard({ providers, models, migrations }: Props) {
</div>
)}
{/* Providers Section */}
<div className="rounded-lg border bg-white p-6 shadow-sm dark:bg-background">
<div className="mb-4">
<h2 className="text-xl font-semibold">Providers</h2>
<p className="mt-1 text-sm text-gray-600 dark:text-gray-400">
Default credentials and feature flags for upstream vendors
</p>
{/* Providers & Creators Section - Side by Side */}
<div className="grid gap-6 lg:grid-cols-2">
{/* Providers */}
<div className="rounded-lg border bg-white p-6 shadow-sm dark:bg-background">
<div className="mb-4 flex items-center justify-between">
<div>
<h2 className="text-xl font-semibold">Providers</h2>
<p className="mt-1 text-sm text-gray-600 dark:text-gray-400">
Who hosts/serves the models
</p>
</div>
<AddProviderModal />
</div>
<ProviderList providers={providers} />
</div>
{/* Creators */}
<div className="rounded-lg border bg-white p-6 shadow-sm dark:bg-background">
<div className="mb-4 flex items-center justify-between">
<div>
<h2 className="text-xl font-semibold">Creators</h2>
<p className="mt-1 text-sm text-gray-600 dark:text-gray-400">
Who made/trained the models
</p>
</div>
<AddCreatorModal />
</div>
<CreatorsTable creators={creators} />
</div>
<ProviderList providers={providers} />
</div>
{/* Models Section */}
<div className="rounded-lg border bg-white p-6 shadow-sm dark:bg-background">
<div className="mb-4">
<h2 className="text-xl font-semibold">Models</h2>
<p className="mt-1 text-sm text-gray-600 dark:text-gray-400">
Toggle availability, adjust context windows, and update credit
pricing
</p>
<div className="mb-4 flex items-center justify-between">
<div>
<h2 className="text-xl font-semibold">Models</h2>
<p className="mt-1 text-sm text-gray-600 dark:text-gray-400">
Toggle availability, adjust context windows, and update credit
pricing
</p>
</div>
<AddModelModal providers={providers} creators={creators} />
</div>
<ModelsTable models={models} providers={providers} />
<ModelsTable models={models} providers={providers} creators={creators} />
</div>
</div>
</div>

View File

@@ -1,4 +1,8 @@
import type { LlmModel, LlmProvider } from "@/lib/autogpt-server-api/types";
import type {
LlmModel,
LlmModelCreator,
LlmProvider,
} from "@/lib/autogpt-server-api/types";
import {
Table,
TableBody,
@@ -16,9 +20,11 @@ import { EditModelModal } from "./EditModelModal";
export function ModelsTable({
models,
providers,
creators,
}: {
models: LlmModel[];
providers: LlmProvider[];
creators: LlmModelCreator[];
}) {
if (!models.length) {
return (
@@ -39,6 +45,7 @@ export function ModelsTable({
<TableRow>
<TableHead>Model</TableHead>
<TableHead>Provider</TableHead>
<TableHead>Creator</TableHead>
<TableHead>Context Window</TableHead>
<TableHead>Max Output</TableHead>
<TableHead>Cost</TableHead>
@@ -73,6 +80,18 @@ export function ModelsTable({
model.provider_id
)}
</TableCell>
<TableCell>
{model.creator ? (
<>
<div>{model.creator.display_name}</div>
<div className="text-xs text-muted-foreground">
{model.creator.name}
</div>
</>
) : (
<span className="text-muted-foreground"></span>
)}
</TableCell>
<TableCell>{model.context_window.toLocaleString()}</TableCell>
<TableCell>
{model.max_output_tokens
@@ -114,7 +133,11 @@ export function ModelsTable({
) : (
<EnableModelButton modelId={model.id} />
)}
<EditModelModal model={model} providers={providers} />
<EditModelModal
model={model}
providers={providers}
creators={creators}
/>
<DeleteModelModal
model={model}
availableModels={models}

View File

@@ -3,6 +3,7 @@
*/
import {
fetchLlmCreators,
fetchLlmMigrations,
fetchLlmModels,
fetchLlmProviders,
@@ -26,10 +27,21 @@ export async function useLlmRegistryPage() {
console.warn("Could not fetch migrations - table may not exist yet");
}
// Fetch creators separately with fallback (table might not exist yet)
let creators: Awaited<ReturnType<typeof fetchLlmCreators>>["creators"] = [];
try {
const creatorsResponse = await fetchLlmCreators();
creators = creatorsResponse.creators;
} catch {
// Creators table might not exist yet - that's ok, just show empty list
console.warn("Could not fetch creators - table may not exist yet");
}
return {
providers: providersResponse.providers,
models: modelsResponse.models,
migrations,
creators,
};
}

View File

@@ -1125,9 +1125,47 @@ const NodeStringInput: FC<{
displayName,
}) => {
value ||= schema.default || "";
// Check if we have options with labels (e.g., LLM model picker)
const hasOptions = schema.options && schema.options.length > 0;
const hasEnum = schema.enum && schema.enum.length > 0;
// Helper to get display label for a value
const getDisplayLabel = (val: string) => {
if (hasOptions) {
const option = schema.options!.find((opt) => opt.value === val);
return option?.label || beautifyString(val);
}
return beautifyString(val);
};
return (
<div className={className}>
{schema.enum && schema.enum.length > 0 ? (
{hasOptions ? (
// Render options with proper labels (used by LLM model picker)
<Select
defaultValue={value}
onValueChange={(newValue) => handleInputChange(selfKey, newValue)}
>
<SelectTrigger>
<SelectValue placeholder={schema.placeholder || displayName}>
{value ? getDisplayLabel(value) : undefined}
</SelectValue>
</SelectTrigger>
<SelectContent className="nodrag">
{schema.options!.map((option, index) => (
<SelectItem
key={index}
value={option.value}
title={option.description}
>
{option.label}
</SelectItem>
))}
</SelectContent>
</Select>
) : hasEnum ? (
// Fallback to enum with beautified strings
<Select
defaultValue={value}
onValueChange={(newValue) => handleInputChange(selfKey, newValue)}
@@ -1136,7 +1174,7 @@ const NodeStringInput: FC<{
<SelectValue placeholder={schema.placeholder || displayName} />
</SelectTrigger>
<SelectContent className="nodrag">
{schema.enum
{schema.enum!
.filter((option) => option)
.map((option, index) => (
<SelectItem key={index} value={option}>

View File

@@ -1,8 +1,19 @@
import { RJSFSchema } from "@rjsf/utils";
/**
* Options type for fields with label/value pairs (e.g., LLM model picker)
*/
type SchemaOption = {
label: string;
value: string;
group?: string;
description?: string;
};
/**
* Pre-processes the input schema to ensure all properties have a type defined.
* If a property doesn't have a type, it assigns a union of all supported JSON Schema types.
* Also converts custom 'options' array to RJSF's enum/enumNames format.
*/
export function preprocessInputSchema(schema: RJSFSchema): RJSFSchema {
if (!schema || typeof schema !== "object") {
@@ -19,6 +30,20 @@ export function preprocessInputSchema(schema: RJSFSchema): RJSFSchema {
if (property && typeof property === "object") {
const processedProperty = { ...property };
// Convert custom 'options' array to RJSF's enum/enumNames format
// This enables proper label display for dropdowns like the LLM model picker
if (
(processedProperty as any).options &&
Array.isArray((processedProperty as any).options) &&
(processedProperty as any).options.length > 0
) {
const options = (processedProperty as any).options as SchemaOption[];
processedProperty.enum = options.map((opt) => opt.value);
(processedProperty as any).enumNames = options.map(
(opt) => opt.label,
);
}
// Only add type if no type is defined AND no anyOf/oneOf/allOf is present
if (
!processedProperty.type &&

View File

@@ -44,6 +44,7 @@ import type {
LibraryAgentResponse,
LibraryAgentSortEnum,
LlmModel,
LlmModelCreator,
MyAgentsResponse,
NodeExecutionResult,
NotificationPreference,
@@ -61,7 +62,9 @@ import type {
LlmMigrationsResponse,
RevertMigrationResponse,
UpsertLlmProviderRequest,
UpsertLlmCreatorRequest,
LlmModelsResponse,
LlmCreatorsResponse,
LlmProvider,
LlmProvidersResponse,
Schedule,
@@ -515,6 +518,34 @@ export default class BackendAPI {
return this._request("POST", `/llm/admin/llm/migrations/${migrationId}/revert`);
}
// Creator management
listAdminLlmCreators(): Promise<LlmCreatorsResponse> {
return this._get("/llm/admin/llm/creators");
}
getAdminLlmCreator(creatorId: string): Promise<LlmModelCreator> {
return this._get(`/llm/admin/llm/creators/${creatorId}`);
}
createAdminLlmCreator(
payload: UpsertLlmCreatorRequest,
): Promise<LlmModelCreator> {
return this._request("POST", "/llm/admin/llm/creators", payload);
}
updateAdminLlmCreator(
creatorId: string,
payload: UpsertLlmCreatorRequest,
): Promise<LlmModelCreator> {
return this._request("PATCH", `/llm/admin/llm/creators/${creatorId}`, payload);
}
deleteAdminLlmCreator(
creatorId: string,
): Promise<{ success: boolean; message: string }> {
return this._request("DELETE", `/llm/admin/llm/creators/${creatorId}`);
}
// API Key related requests
async createAPIKey(
name: string,

View File

@@ -168,9 +168,17 @@ export type BlockIOTableSubSchema = BlockIOSubSchemaMeta & {
secret?: boolean;
};
export type BlockIOStringSubSchemaOption = {
label: string;
value: string;
group?: string;
description?: string;
};
export type BlockIOStringSubSchema = BlockIOSubSchemaMeta & {
type: "string";
enum?: string[];
options?: BlockIOStringSubSchemaOption[];
secret?: true;
const?: string;
default?: string;
@@ -270,12 +278,26 @@ export type LlmModelCost = LlmModelCostInput & {
id: string;
};
// Creator represents the organization that created/trained the model (e.g., OpenAI, Meta)
// This is distinct from Provider who hosts/serves the model (e.g., OpenRouter)
export type LlmModelCreator = {
id: string;
name: string;
display_name: string;
description?: string | null;
website_url?: string | null;
logo_url?: string | null;
metadata: Record<string, any>;
};
export type LlmModel = {
id: string;
slug: string;
display_name: string;
description?: string | null;
provider_id: string;
creator_id?: string | null;
creator?: LlmModelCreator | null;
context_window: number;
max_output_tokens?: number | null;
is_enabled: boolean;
@@ -308,6 +330,19 @@ export type LlmModelsResponse = {
models: LlmModel[];
};
export type LlmCreatorsResponse = {
creators: LlmModelCreator[];
};
export type UpsertLlmCreatorRequest = {
name: string;
display_name: string;
description?: string | null;
website_url?: string | null;
logo_url?: string | null;
metadata?: Record<string, any>;
};
export type UpsertLlmProviderRequest = {
name: string;
display_name: string;
@@ -327,6 +362,7 @@ export type CreateLlmModelRequest = {
display_name: string;
description?: string | null;
provider_id: string;
creator_id?: string | null;
context_window: number;
max_output_tokens?: number | null;
is_enabled?: boolean;
@@ -339,6 +375,7 @@ export type UpdateLlmModelRequest = {
display_name?: string;
description?: string | null;
provider_id?: string;
creator_id?: string | null;
context_window?: number;
max_output_tokens?: number | null;
is_enabled?: boolean;