Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-09 23:28:07 -05:00)
feat(blocks): Add Open Router integration with a large selection of new models (#8653)
* feat: Add Open Router integration credentials
  - Added support for Open Router integration credentials in the Supabase integration credentials store.
  - Updated the LLM provider field to include "open_router" as a valid provider option.
  - Added Open Router API key field to the backend settings.
  - Updated the profile page to display the Open Router integration credentials.
  - Updated the credentials input and provider components to include Open Router as a provider option.
  - Updated the autogpt-server-api types to include "open_router" as a provider name.
  - Updated the LLM provider schema to include "open_router" as a valid provider name.
  - Added GEMINI_FLASH_1_5_8B as the first Open Router LLM

* Add type ignore to new llm prompt to match the rest of them.

* Update LlmModel with a selection of new OpenRouter models

* format
Committed by: GitHub
Parent: 402789d8cd
Commit: 29cff1bb4e
@@ -79,13 +79,20 @@ jina_credentials = APIKeyCredentials(
     title="Use Credits for Jina",
     expires_at=None,
 )
 unreal_credentials = APIKeyCredentials(
     id="66f20754-1b81-48e4-91d0-f4f0dd82145f",
     provider="unreal",
     api_key=SecretStr(settings.secrets.unreal_speech_api_key),
     title="Use Credits for Unreal",
     expires_at=None,
 )
+open_router_credentials = APIKeyCredentials(
+    id="b5a0e27d-0c98-4df3-a4b9-10193e1f3c40",
+    provider="open_router",
+    api_key=SecretStr(settings.secrets.open_router_api_key),
+    title="Use Credits for Open Router",
+    expires_at=None,
+)
 
 
 DEFAULT_CREDENTIALS = [
@@ -98,6 +105,7 @@ DEFAULT_CREDENTIALS = [
     did_credentials,
     jina_credentials,
     unreal_credentials,
+    open_router_credentials,
 ]
 
 
@@ -145,6 +153,8 @@ class SupabaseIntegrationCredentialsStore:
             all_credentials.append(jina_credentials)
         if settings.secrets.unreal_speech_api_key:
             all_credentials.append(unreal_credentials)
+        if settings.secrets.open_router_api_key:
+            all_credentials.append(open_router_credentials)
         return all_credentials
 
     def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:
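As a side note (not part of the diff): the conditional-append pattern above means the Open Router default credential is only exposed when an API key is actually configured. A minimal, self-contained sketch of that behaviour, using simplified stand-ins rather than the real autogpt_libs classes:

# Simplified stand-in for the "only expose credentials whose key is set" pattern.
from dataclasses import dataclass

@dataclass
class APIKeyCredentials:  # stand-in, not the autogpt_libs class
    id: str
    provider: str
    api_key: str
    title: str

def available_default_credentials(open_router_api_key: str) -> list[APIKeyCredentials]:
    creds: list[APIKeyCredentials] = []
    if open_router_api_key:  # mirrors `if settings.secrets.open_router_api_key:`
        creds.append(
            APIKeyCredentials(
                id="b5a0e27d-0c98-4df3-a4b9-10193e1f3c40",
                provider="open_router",
                api_key=open_router_api_key,
                title="Use Credits for Open Router",
            )
        )
    return creds

print(available_default_credentials(""))             # [] -> key not configured
print(available_default_credentials("example-key"))  # one Open Router credential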
@@ -57,6 +57,7 @@ GOOGLE_CLIENT_SECRET=
 OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
 GROQ_API_KEY=
+OPEN_ROUTER_API_KEY=
 
 # Reddit
 REDDIT_CLIENT_ID=
@@ -30,7 +30,7 @@ logger = logging.getLogger(__name__)
 #     "ollama": BlockSecret(value=""),
 # }
 
-LLMProviderName = Literal["anthropic", "groq", "openai", "ollama"]
+LLMProviderName = Literal["anthropic", "groq", "openai", "ollama", "open_router"]
 AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]]
 
 TEST_CREDENTIALS = APIKeyCredentials(
@@ -51,7 +51,7 @@ TEST_CREDENTIALS_INPUT = {
 def AICredentialsField() -> AICredentials:
     return CredentialsField(
         description="API key for the LLM provider.",
-        provider=["anthropic", "groq", "openai", "ollama"],
+        provider=["anthropic", "groq", "openai", "ollama", "open_router"],
         supported_credential_types={"api_key"},
        discriminator="model",
        discriminator_mapping={
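For readers unfamiliar with the Literal-based provider typing above, here is a minimal standalone sketch (not the actual CredentialsMetaInput/CredentialsField machinery) of what widening LLMProviderName buys: the accepted provider strings can be derived from the type itself.

# Illustration only: validate a provider string against the widened Literal type.
from typing import Literal, get_args

LLMProviderName = Literal["anthropic", "groq", "openai", "ollama", "open_router"]

def validate_provider(name: str) -> LLMProviderName:
    allowed = get_args(LLMProviderName)
    if name not in allowed:
        raise ValueError(f"Unsupported LLM provider: {name!r} (expected one of {allowed})")
    return name  # type: ignore[return-value]

print(validate_provider("open_router"))  # accepted after this change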
@@ -108,6 +108,18 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     # Ollama models
     OLLAMA_LLAMA3_8B = "llama3"
     OLLAMA_LLAMA3_405B = "llama3.1:405b"
+    # OpenRouter models
+    GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5"
+    GEMINI_FLASH_1_5_EXP = "google/gemini-flash-1.5-exp"
+    GROK_BETA = "x-ai/grok-beta"
+    MISTRAL_NEMO = "mistralai/mistral-nemo"
+    COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
+    COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024"
+    EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b"
+    DEEPSEEK_CHAT = "deepseek/deepseek-chat"
+    PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
+        "perplexity/llama-3.1-sonar-large-128k-online"
+    )
 
     @property
     def metadata(self) -> ModelMetadata:
@@ -142,6 +154,17 @@ MODEL_METADATA = {
     LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
     LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
     LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
+    LlmModel.GEMINI_FLASH_1_5_8B: ModelMetadata("open_router", 8192),
+    LlmModel.GEMINI_FLASH_1_5_EXP: ModelMetadata("open_router", 8192),
+    LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
+    LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 4000),
+    LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 4000),
+    LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 4000),
+    LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 4000),
+    LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 8192),
+    LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
+        "open_router", 8192
+    ),
 }
 
 for model in LlmModel:
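A compact, standalone illustration of the enum-plus-metadata pattern extended above, reduced to two of the new models; the ModelMetadata stand-in here is simplified and is not the real module:

# Stand-in sketch: enum value is the OpenRouter model id, metadata maps it to a
# provider name and a context-window size, looked up via a property.
from enum import Enum
from typing import NamedTuple

class ModelMetadata(NamedTuple):  # simplified stand-in
    provider: str
    context_window: int

class LlmModel(str, Enum):  # two entries only, for illustration
    GROK_BETA = "x-ai/grok-beta"
    DEEPSEEK_CHAT = "deepseek/deepseek-chat"

    @property
    def metadata(self) -> ModelMetadata:
        return MODEL_METADATA[self]

MODEL_METADATA = {
    LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
    LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 8192),
}

print(LlmModel.GROK_BETA.value)              # "x-ai/grok-beta" – the OpenRouter model id
print(LlmModel.GROK_BETA.metadata.provider)  # "open_router"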
@@ -354,6 +377,34 @@ class AIStructuredResponseGeneratorBlock(Block):
                 response.get("prompt_eval_count") or 0,
                 response.get("eval_count") or 0,
             )
+        elif provider == "open_router":
+            client = openai.OpenAI(
+                base_url="https://openrouter.ai/api/v1",
+                api_key=credentials.api_key.get_secret_value(),
+            )
+
+            response = client.chat.completions.create(
+                extra_headers={
+                    "HTTP-Referer": "https://agpt.co",
+                    "X-Title": "AutoGPT",
+                },
+                model=llm_model.value,
+                messages=prompt,  # type: ignore
+                max_tokens=max_tokens,
+            )
+
+            # If there's no response, raise an error
+            if not response.choices:
+                if response:
+                    raise ValueError(f"OpenRouter error: {response}")
+                else:
+                    raise ValueError("No response from OpenRouter.")
+
+            return (
+                response.choices[0].message.content or "",
+                response.usage.prompt_tokens if response.usage else 0,
+                response.usage.completion_tokens if response.usage else 0,
+            )
         else:
             raise ValueError(f"Unsupported LLM provider: {provider}")
 
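The open_router branch added above reuses the OpenAI SDK against OpenRouter's OpenAI-compatible endpoint. A trimmed, standalone version of that call is sketched below; the example model and the direct os.environ lookup are choices made for this demo, not code from the PR:

# Standalone sketch of the OpenRouter call: point the openai client at
# OpenRouter's OpenAI-compatible endpoint via base_url and pass the
# attribution headers from the diff through extra_headers.
import os
import openai

client = openai.OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key=os.environ["OPEN_ROUTER_API_KEY"],  # env var name from the .env hunk above
)

response = client.chat.completions.create(
    extra_headers={
        "HTTP-Referer": "https://agpt.co",
        "X-Title": "AutoGPT",
    },
    model="x-ai/grok-beta",  # any LlmModel value mapped to "open_router"
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    max_tokens=64,
)

if not response.choices:
    raise ValueError(f"OpenRouter error: {response}")
print(response.choices[0].message.content)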
@@ -6,6 +6,7 @@ from autogpt_libs.supabase_integration_credentials_store.store import (
     groq_credentials,
     ideogram_credentials,
     jina_credentials,
+    open_router_credentials,
     openai_credentials,
     replicate_credentials,
     revid_credentials,
@@ -54,6 +55,15 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.LLAMA3_1_8B: 1,
     LlmModel.OLLAMA_LLAMA3_8B: 1,
     LlmModel.OLLAMA_LLAMA3_405B: 1,
+    LlmModel.GEMINI_FLASH_1_5_8B: 1,
+    LlmModel.GEMINI_FLASH_1_5_EXP: 1,
+    LlmModel.GROK_BETA: 5,
+    LlmModel.MISTRAL_NEMO: 1,
+    LlmModel.COHERE_COMMAND_R_08_2024: 1,
+    LlmModel.COHERE_COMMAND_R_PLUS_08_2024: 3,
+    LlmModel.EVA_QWEN_2_5_32B: 1,
+    LlmModel.DEEPSEEK_CHAT: 2,
+    LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: 1,
 }
 
 for model in LlmModel:
@@ -124,6 +134,23 @@ LLM_COST = (
             cost_filter={"api_key": None},
         ),
     ]
+    # Open Router Models
+    + [
+        BlockCost(
+            cost_type=BlockCostType.RUN,
+            cost_filter={
+                "model": model,
+                "credentials": {
+                    "id": open_router_credentials.id,
+                    "provider": open_router_credentials.provider,
+                    "type": open_router_credentials.type,
+                },
+            },
+            cost_amount=cost,
+        )
+        for model, cost in MODEL_COST.items()
+        if MODEL_METADATA[model].provider == "open_router"
+    ]
 )
 
 # =============== This is the exhaustive list of cost for each Block =============== #
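To make the comprehension above concrete, here is a simplified sketch (with plain stand-ins instead of the codebase's BlockCost and MODEL_METADATA) of how one cost entry is generated per model whose provider is open_router:

# Stand-in sketch of the per-model cost expansion used above.
from dataclasses import dataclass

@dataclass
class BlockCost:  # simplified stand-in
    cost_amount: int
    cost_filter: dict

MODEL_COST = {"x-ai/grok-beta": 5, "deepseek/deepseek-chat": 2, "llama3": 1}
MODEL_PROVIDER = {
    "x-ai/grok-beta": "open_router",
    "deepseek/deepseek-chat": "open_router",
    "llama3": "ollama",
}

open_router_costs = [
    BlockCost(cost_amount=cost, cost_filter={"model": model})
    for model, cost in MODEL_COST.items()
    if MODEL_PROVIDER[model] == "open_router"
]
print(open_router_costs)  # two entries; the ollama model is filtered out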
@@ -238,6 +238,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
     openai_api_key: str = Field(default="", description="OpenAI API key")
     anthropic_api_key: str = Field(default="", description="Anthropic API key")
     groq_api_key: str = Field(default="", description="Groq API key")
+    open_router_api_key: str = Field(default="", description="Open Router API Key")
 
     reddit_client_id: str = Field(default="", description="Reddit client ID")
     reddit_client_secret: str = Field(default="", description="Reddit client secret")
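A minimal sketch, assuming default pydantic-settings behaviour, of how the new open_router_api_key field picks up the OPEN_ROUTER_API_KEY value added to the .env template earlier in this diff:

# Sketch only: a trimmed Secrets model (without UpdateTrackingModel) reading the
# key from the environment; pydantic-settings matches field names to env vars
# case-insensitively by default.
import os

from pydantic import Field
from pydantic_settings import BaseSettings

class Secrets(BaseSettings):
    open_router_api_key: str = Field(default="", description="Open Router API Key")

os.environ["OPEN_ROUTER_API_KEY"] = "demo-key"  # stand-in value for the demo
print(Secrets().open_router_api_key)            # "demo-key"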
@@ -73,6 +73,7 @@ export default function PrivatePage() {
       "7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID
       "7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina
       "66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech
+      "b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router
     ],
     [],
   );
@@ -60,6 +60,7 @@ export const providerIcons: Record<
   ollama: fallbackIcon,
   openai: fallbackIcon,
   openweathermap: fallbackIcon,
+  open_router: fallbackIcon,
   pinecone: fallbackIcon,
   replicate: fallbackIcon,
   revid: fallbackIcon,
@@ -34,6 +34,7 @@ const providerDisplayNames: Record<CredentialsProviderName, string> = {
   ollama: "Ollama",
   openai: "OpenAI",
   openweathermap: "OpenWeatherMap",
+  open_router: "Open Router",
   pinecone: "Pinecone",
   replicate: "Replicate",
   revid: "Rev.ID",
@@ -112,6 +112,7 @@ export const PROVIDER_NAMES = {
   OLLAMA: "ollama",
   OPENAI: "openai",
   OPENWEATHERMAP: "openweathermap",
+  OPEN_ROUTER: "open_router",
   PINECONE: "pinecone",
   REPLICATE: "replicate",
   REVID: "revid",