feat: add LiteLLM onboarding handler and provider config

Add applyLitellmProviderConfig, which registers
models.providers.litellm with its baseUrl, api type, and model
definitions. This fixes the critical bug from PR #6488 where the
provider entry was never created, causing model resolution to fail at
runtime.
Author: Ryan Crabbe
Date: 2026-02-07 17:35:25 -08:00
Committed by: Peter Steinberger
Parent: a4a7a0256c
Commit: ddbbc2ae12
4 changed files with 185 additions and 0 deletions
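
For reference, this is the config slice the new helper produces, reconstructed from the diff below. The model-ref key is illustrative: the actual string comes from LITELLM_DEFAULT_MODEL_REF, whose value is not shown in this commit.

// Sketch of the resulting provider entry; "litellm/claude-opus-4-6" is
// an assumed rendering of LITELLM_DEFAULT_MODEL_REF.
const expected: Partial<OpenClawConfig> = {
  agents: {
    defaults: {
      models: { "litellm/claude-opus-4-6": { alias: "LiteLLM" } },
    },
  },
  models: {
    mode: "merge",
    providers: {
      litellm: {
        baseUrl: "http://localhost:4000",
        api: "openai-completions",
        models: [
          {
            id: "claude-opus-4-6",
            name: "Claude Opus 4.6",
            reasoning: true,
            input: ["text", "image"],
            cost: { input: 15, output: 75, cacheRead: 1.5, cacheWrite: 18.75 },
            contextWindow: 200_000,
            maxTokens: 64_000,
          },
        ],
      },
    },
  },
};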

View File

@@ -189,6 +189,11 @@ export function buildAuthChoiceOptions(params: {
label: "Qianfan API key",
});
options.push({ value: "openrouter-api-key", label: "OpenRouter API key" });
options.push({
value: "litellm-api-key",
label: "LiteLLM API key",
hint: "Unified gateway for 100+ LLM providers",
});
options.push({
value: "ai-gateway-api-key",
label: "Vercel AI Gateway API key",

View File

@@ -19,6 +19,8 @@ import {
  applyQianfanProviderConfig,
  applyKimiCodeConfig,
  applyKimiCodeProviderConfig,
  applyLitellmConfig,
  applyLitellmProviderConfig,
  applyMoonshotConfig,
  applyMoonshotConfigCn,
  applyMoonshotProviderConfig,
@@ -39,6 +41,7 @@ import {
  applyXiaomiProviderConfig,
  applyZaiConfig,
  CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
  LITELLM_DEFAULT_MODEL_REF,
  QIANFAN_DEFAULT_MODEL_REF,
  KIMI_CODING_MODEL_REF,
  MOONSHOT_DEFAULT_MODEL_REF,
@@ -51,6 +54,7 @@ import {
  setCloudflareAiGatewayConfig,
  setQianfanApiKey,
  setGeminiApiKey,
  setLitellmApiKey,
  setKimiCodingApiKey,
  setMoonshotApiKey,
  setOpencodeZenApiKey,
@@ -89,6 +93,8 @@ export async function applyAuthChoiceApiProviders(
) {
  if (params.opts.tokenProvider === "openrouter") {
    authChoice = "openrouter-api-key";
  } else if (params.opts.tokenProvider === "litellm") {
    authChoice = "litellm-api-key";
  } else if (params.opts.tokenProvider === "vercel-ai-gateway") {
    authChoice = "ai-gateway-api-key";
  } else if (params.opts.tokenProvider === "cloudflare-ai-gateway") {
@@ -197,6 +203,95 @@ export async function applyAuthChoiceApiProviders(
    return { config: nextConfig, agentModelOverride };
  }
  if (authChoice === "litellm-api-key") {
    const store = ensureAuthProfileStore(params.agentDir, {
      allowKeychainPrompt: false,
    });
    const profileOrder = resolveAuthProfileOrder({
      cfg: nextConfig,
      store,
      provider: "litellm",
    });
    // Prefer an existing stored LiteLLM auth profile.
    const existingProfileId = profileOrder.find((profileId) => Boolean(store.profiles[profileId]));
    const existingCred = existingProfileId ? store.profiles[existingProfileId] : undefined;
    let profileId = "litellm:default";
    let mode: "api_key" | "oauth" | "token" = "api_key";
    let hasCredential = false;
    if (existingProfileId && existingCred?.type) {
      profileId = existingProfileId;
      mode =
        existingCred.type === "oauth"
          ? "oauth"
          : existingCred.type === "token"
            ? "token"
            : "api_key";
      hasCredential = true;
    }
    // Next, accept a token passed through the onboarding options.
    if (!hasCredential && params.opts?.token && params.opts?.tokenProvider === "litellm") {
      await setLitellmApiKey(normalizeApiKeyInput(params.opts.token), params.agentDir);
      hasCredential = true;
    }
    if (!hasCredential) {
      await params.prompter.note(
        [
          "LiteLLM provides a unified API to 100+ LLM providers.",
          "Get your API key from your LiteLLM proxy or https://litellm.ai",
          "Default proxy runs on http://localhost:4000",
        ].join("\n"),
        "LiteLLM",
      );
    }
    // Then offer a LITELLM_API_KEY discovered in the environment.
    if (!hasCredential) {
      const envKey = resolveEnvApiKey("litellm");
      if (envKey) {
        const useExisting = await params.prompter.confirm({
          message: `Use existing LITELLM_API_KEY (${envKey.source}, ${formatApiKeyPreview(envKey.apiKey)})?`,
          initialValue: true,
        });
        if (useExisting) {
          await setLitellmApiKey(envKey.apiKey, params.agentDir);
          hasCredential = true;
        }
      }
    }
    // Finally, prompt for a key interactively.
    if (!hasCredential) {
      const key = await params.prompter.text({
        message: "Enter LiteLLM API key",
        validate: validateApiKeyInput,
      });
      await setLitellmApiKey(normalizeApiKeyInput(String(key)), params.agentDir);
      hasCredential = true;
    }
    if (hasCredential) {
      nextConfig = applyAuthProfileConfig(nextConfig, {
        profileId,
        provider: "litellm",
        mode,
      });
    }
    // Apply the default LiteLLM model choice and provider config.
    {
      const applied = await applyDefaultModelChoice({
        config: nextConfig,
        setDefaultModel: params.setDefaultModel,
        defaultModel: LITELLM_DEFAULT_MODEL_REF,
        applyDefaultConfig: applyLitellmConfig,
        applyProviderConfig: applyLitellmProviderConfig,
        noteDefault: LITELLM_DEFAULT_MODEL_REF,
        noteAgentModel,
        prompter: params.prompter,
      });
      nextConfig = applied.config;
      agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
    }
    return { config: nextConfig, agentModelOverride };
  }
  if (authChoice === "ai-gateway-api-key") {
    let hasCredential = false;

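Note on the litellm-api-key flow above: a credential is resolved in precedence order, namely an existing stored auth profile, then a token passed through onboarding options (params.opts.token with tokenProvider === "litellm"), then a LITELLM_API_KEY found in the environment (offered behind an interactive confirmation), and finally a direct prompt for the key. Whichever path succeeds, the auth profile is recorded via applyAuthProfileConfig and the default model is wired up through applyDefaultModelChoice.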
View File

@@ -29,6 +29,7 @@ import {
} from "../agents/venice-models.js";
import {
  CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
  LITELLM_DEFAULT_MODEL_REF,
  OPENROUTER_DEFAULT_MODEL_REF,
  TOGETHER_DEFAULT_MODEL_REF,
  VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
@@ -252,6 +253,86 @@ export function applyOpenrouterConfig(cfg: OpenClawConfig): OpenClawConfig {
  };
}

export const LITELLM_BASE_URL = "http://localhost:4000";
export const LITELLM_DEFAULT_MODEL_ID = "claude-opus-4-6";

// Seed model definition registered under the LiteLLM provider.
function buildLitellmModelDefinition(): {
  id: string;
  name: string;
  reasoning: boolean;
  input: Array<"text" | "image">;
  cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
  contextWindow: number;
  maxTokens: number;
} {
  return {
    id: LITELLM_DEFAULT_MODEL_ID,
    name: "Claude Opus 4.6",
    reasoning: true,
    input: ["text", "image"],
    cost: { input: 15, output: 75, cacheRead: 1.5, cacheWrite: 18.75 },
    contextWindow: 200_000,
    maxTokens: 64_000,
  };
}

export function applyLitellmProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  const models = { ...cfg.agents?.defaults?.models };
  models[LITELLM_DEFAULT_MODEL_REF] = {
    ...models[LITELLM_DEFAULT_MODEL_REF],
    alias: models[LITELLM_DEFAULT_MODEL_REF]?.alias ?? "LiteLLM",
  };
  const providers = { ...cfg.models?.providers };
  const existingProvider = providers.litellm;
  const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : [];
  const defaultModel = buildLitellmModelDefinition();
  // Keep any user-defined models; append the default only when it is missing.
  const hasDefaultModel = existingModels.some((model) => model.id === LITELLM_DEFAULT_MODEL_ID);
  const mergedModels = hasDefaultModel ? existingModels : [...existingModels, defaultModel];
  // Preserve a previously stored apiKey, dropping it when it is blank.
  const { apiKey: existingApiKey, ...existingProviderRest } = (existingProvider ?? {}) as Record<
    string,
    unknown
  > as { apiKey?: string };
  const resolvedApiKey = typeof existingApiKey === "string" ? existingApiKey : undefined;
  const normalizedApiKey = resolvedApiKey?.trim();
  providers.litellm = {
    ...existingProviderRest,
    // baseUrl is always (re)set to the local default proxy URL.
    baseUrl: LITELLM_BASE_URL,
    api: "openai-completions",
    ...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
    models: mergedModels.length > 0 ? mergedModels : [defaultModel],
  };
  return {
    ...cfg,
    agents: {
      ...cfg.agents,
      defaults: {
        ...cfg.agents?.defaults,
        models,
      },
    },
    models: {
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}

export function applyLitellmConfig(cfg: OpenClawConfig): OpenClawConfig {
  const next = applyLitellmProviderConfig(cfg);
  const existingModel = next.agents?.defaults?.model;
  return {
    ...next,
    agents: {
      ...next.agents,
      defaults: {
        ...next.agents?.defaults,
        model: {
          // Carry over any configured fallbacks while switching the primary.
          ...(existingModel && "fallbacks" in (existingModel as Record<string, unknown>)
            ? {
                fallbacks: (existingModel as { fallbacks?: string[] }).fallbacks,
              }
            : undefined),
          primary: LITELLM_DEFAULT_MODEL_REF,
        },
      },
    },
  };
}

export function applyMoonshotProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  return applyMoonshotProviderConfigWithBaseUrl(cfg, MOONSHOT_BASE_URL);
}
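
As a quick check of the merge semantics in the LiteLLM helpers above, a minimal usage sketch; this assumes OpenClawConfig permits an empty object, as the pervasive optional chaining in the helpers suggests.

// Applying the default path seeds the provider, model list, and primary model.
const seeded = applyLitellmConfig({} as OpenClawConfig);
// seeded.models.providers.litellm.baseUrl === "http://localhost:4000"
// seeded.agents.defaults.model.primary === LITELLM_DEFAULT_MODEL_REF

// Re-applying the provider config is idempotent for the seeded model:
// buildLitellmModelDefinition() is appended only when its id is absent.
const reapplied = applyLitellmProviderConfig(seeded);
// reapplied.models.providers.litellm.models.length === 1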

View File

@@ -11,6 +11,8 @@ export {
  applyQianfanProviderConfig,
  applyKimiCodeConfig,
  applyKimiCodeProviderConfig,
  applyLitellmConfig,
  applyLitellmProviderConfig,
  applyMoonshotConfig,
  applyMoonshotConfigCn,
  applyMoonshotProviderConfig,
@@ -46,11 +48,13 @@ export {
} from "./onboard-auth.config-opencode.js";
export {
  CLOUDFLARE_AI_GATEWAY_DEFAULT_MODEL_REF,
  LITELLM_DEFAULT_MODEL_REF,
  OPENROUTER_DEFAULT_MODEL_REF,
  setAnthropicApiKey,
  setCloudflareAiGatewayConfig,
  setQianfanApiKey,
  setGeminiApiKey,
  setLitellmApiKey,
  setKimiCodingApiKey,
  setMinimaxApiKey,
  setMoonshotApiKey,