fix(onboarding): harden LiteLLM provider setup (#12823)

This commit is contained in:
Peter Steinberger
2026-02-11 11:35:53 +01:00
parent d7cdd0d910
commit 5f1dbcae36
6 changed files with 151 additions and 13 deletions

View File

@@ -37,6 +37,7 @@ Docs: https://docs.openclaw.ai
- Telegram: match DM `allowFrom` against sender user id (fallback to chat id) and clarify pairing logs. (#12779) Thanks @liuxiaopai-ai.
- Pairing/Telegram: include the actual pairing code in approve commands, route Telegram pairing replies through the shared pairing message builder, and add regression checks to prevent `<code>` placeholder drift.
- Onboarding: QuickStart now auto-installs shell completion (prompt only in Manual).
- Onboarding/Providers: add LiteLLM provider onboarding and preserve custom LiteLLM proxy base URLs while enforcing API-key auth mode. (#12823) Thanks @ryan-crabbe.
- Docker: make `docker-setup.sh` compatible with macOS Bash 3.2 and empty extra mounts. (#9441) Thanks @mateusz-michalik.
- Auth: strip embedded line breaks from pasted API keys and tokens before storing/resolving credentials.
- Agents: strip reasoning tags and downgraded tool markers from messaging tool and streaming output to prevent leakage. (#11053, #13453) Thanks @liebertar, @meaadore1221-afk, @gumadeiras.

View File

@@ -343,6 +343,7 @@ For teams preferring infrastructure-as-code workflows, a community-maintained Te
- SSH tunnel configuration for gateway access
**Repositories:**
- Infrastructure: [openclaw-terraform-hetzner](https://github.com/andreesg/openclaw-terraform-hetzner)
- Docker config: [openclaw-docker-config](https://github.com/andreesg/openclaw-docker-config)

View File

@@ -215,17 +215,10 @@ export async function applyAuthChoiceApiProviders(
const existingProfileId = profileOrder.find((profileId) => Boolean(store.profiles[profileId]));
const existingCred = existingProfileId ? store.profiles[existingProfileId] : undefined;
let profileId = "litellm:default";
let mode: "api_key" | "oauth" | "token" = "api_key";
let hasCredential = false;
if (existingProfileId && existingCred?.type) {
if (existingProfileId && existingCred?.type === "api_key") {
profileId = existingProfileId;
mode =
existingCred.type === "oauth"
? "oauth"
: existingCred.type === "token"
? "token"
: "api_key";
hasCredential = true;
}
@@ -272,7 +265,7 @@ export async function applyAuthChoiceApiProviders(
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId,
provider: "litellm",
mode,
mode: "api_key",
});
}
{

View File

@@ -32,6 +32,7 @@ describe("applyAuthChoice", () => {
const previousAgentDir = process.env.OPENCLAW_AGENT_DIR;
const previousPiAgentDir = process.env.PI_CODING_AGENT_DIR;
const previousOpenrouterKey = process.env.OPENROUTER_API_KEY;
const previousLitellmKey = process.env.LITELLM_API_KEY;
const previousAiGatewayKey = process.env.AI_GATEWAY_API_KEY;
const previousCloudflareGatewayKey = process.env.CLOUDFLARE_AI_GATEWAY_API_KEY;
const previousSshTty = process.env.SSH_TTY;
@@ -65,6 +66,11 @@ describe("applyAuthChoice", () => {
} else {
process.env.OPENROUTER_API_KEY = previousOpenrouterKey;
}
if (previousLitellmKey === undefined) {
delete process.env.LITELLM_API_KEY;
} else {
process.env.LITELLM_API_KEY = previousLitellmKey;
}
if (previousAiGatewayKey === undefined) {
delete process.env.AI_GATEWAY_API_KEY;
} else {
@@ -402,6 +408,96 @@ describe("applyAuthChoice", () => {
delete process.env.OPENROUTER_API_KEY;
});
// Regression test for #12823: when the user picks the "litellm-api-key" auth
// choice, a previously stored LiteLLM *oauth* profile must NOT be reused.
// The flow should instead create a fresh "litellm:default" api_key profile
// sourced from LITELLM_API_KEY.
it("ignores legacy LiteLLM oauth profiles when selecting litellm-api-key", async () => {
// Isolate all state in a throwaway directory so the test never touches real config.
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;
process.env.OPENCLAW_AGENT_DIR = path.join(tempStateDir, "agent");
process.env.PI_CODING_AGENT_DIR = process.env.OPENCLAW_AGENT_DIR;
// Env key present: the wizard should offer to reuse it via a confirm prompt
// rather than asking the user to type a key.
process.env.LITELLM_API_KEY = "sk-litellm-test";
const authProfilePath = authProfilePathFor(requireAgentDir());
await fs.mkdir(path.dirname(authProfilePath), { recursive: true });
// Seed the on-disk credential store with a legacy oauth-type LiteLLM profile —
// the shape the fix must ignore when resolving api-key auth.
await fs.writeFile(
authProfilePath,
JSON.stringify(
{
version: 1,
profiles: {
"litellm:legacy": {
type: "oauth",
provider: "litellm",
access: "access-token",
refresh: "refresh-token",
expires: Date.now() + 60_000,
},
},
},
null,
2,
),
"utf8",
);
// Prompter stubs: `text` stays un-called (asserted below); `select` always
// picks the first option; `confirm` accepts reusing the env key.
const text = vi.fn();
const select: WizardPrompter["select"] = vi.fn(
async (params) => params.options[0]?.value as never,
);
const multiselect: WizardPrompter["multiselect"] = vi.fn(async () => []);
const confirm = vi.fn(async () => true);
const prompter: WizardPrompter = {
intro: vi.fn(noopAsync),
outro: vi.fn(noopAsync),
note: vi.fn(noopAsync),
select,
multiselect,
text,
confirm,
progress: vi.fn(() => ({ update: noop, stop: noop })),
};
// Runtime stub: surface any exit() call as a thrown error so it fails the test.
const runtime: RuntimeEnv = {
log: vi.fn(),
error: vi.fn(),
exit: vi.fn((code: number) => {
throw new Error(`exit:${code}`);
}),
};
// Drive the auth flow with config that still references the legacy oauth profile.
const result = await applyAuthChoice({
authChoice: "litellm-api-key",
config: {
auth: {
profiles: {
"litellm:legacy": { provider: "litellm", mode: "oauth" },
},
order: { litellm: ["litellm:legacy"] },
},
},
prompter,
runtime,
setDefaultModel: true,
});
// The env-key reuse confirm prompt must have mentioned LITELLM_API_KEY …
expect(confirm).toHaveBeenCalledWith(
expect.objectContaining({
message: expect.stringContaining("LITELLM_API_KEY"),
}),
);
// … and no manual key entry should have been requested.
expect(text).not.toHaveBeenCalled();
// Config gains a new api_key-mode default profile (legacy oauth one not reused).
expect(result.config.auth?.profiles?.["litellm:default"]).toMatchObject({
provider: "litellm",
mode: "api_key",
});
// Credential store persists the env key under the new profile id.
const raw = await fs.readFile(authProfilePath, "utf8");
const parsed = JSON.parse(raw) as {
profiles?: Record<string, { type?: string; key?: string }>;
};
expect(parsed.profiles?.["litellm:default"]).toMatchObject({
type: "api_key",
key: "sk-litellm-test",
});
});
it("uses existing AI_GATEWAY_API_KEY when selecting ai-gateway-api-key", async () => {
tempStateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-auth-"));
process.env.OPENCLAW_STATE_DIR = tempStateDir;

View File

@@ -255,6 +255,14 @@ export function applyOpenrouterConfig(cfg: OpenClawConfig): OpenClawConfig {
// Fallback LiteLLM proxy endpoint, used only when no custom baseUrl is already configured.
export const LITELLM_BASE_URL = "http://localhost:4000";
// Model id registered by default for the LiteLLM provider.
export const LITELLM_DEFAULT_MODEL_ID = "claude-opus-4-6";
// Conservative limits for the default model entry; actual limits depend on the
// upstream model behind the proxy — NOTE(review): placeholders, not measured values.
const LITELLM_DEFAULT_CONTEXT_WINDOW = 128_000;
const LITELLM_DEFAULT_MAX_TOKENS = 8_192;
// Zeroed pricing: LiteLLM routes to many upstreams, so per-token cost is
// unknowable here; neutral placeholders avoid reporting misleading costs.
const LITELLM_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
function buildLitellmModelDefinition(): {
id: string;
@@ -270,9 +278,10 @@ function buildLitellmModelDefinition(): {
name: "Claude Opus 4.6",
reasoning: true,
input: ["text", "image"],
cost: { input: 15, output: 75, cacheRead: 1.5, cacheWrite: 18.75 },
contextWindow: 200_000,
maxTokens: 64_000,
// LiteLLM routes to many upstreams; keep neutral placeholders.
cost: LITELLM_DEFAULT_COST,
contextWindow: LITELLM_DEFAULT_CONTEXT_WINDOW,
maxTokens: LITELLM_DEFAULT_MAX_TOKENS,
};
}
@@ -293,11 +302,13 @@ export function applyLitellmProviderConfig(cfg: OpenClawConfig): OpenClawConfig
string,
unknown
> as { apiKey?: string };
const resolvedBaseUrl =
typeof existingProvider?.baseUrl === "string" ? existingProvider.baseUrl.trim() : "";
const resolvedApiKey = typeof existingApiKey === "string" ? existingApiKey : undefined;
const normalizedApiKey = resolvedApiKey?.trim();
providers.litellm = {
...existingProviderRest,
baseUrl: LITELLM_BASE_URL,
baseUrl: resolvedBaseUrl || LITELLM_BASE_URL,
api: "openai-completions",
...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
models: mergedModels.length > 0 ? mergedModels : [defaultModel],

View File

@@ -5,6 +5,7 @@ import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import {
applyAuthProfileConfig,
applyLitellmProviderConfig,
applyMinimaxApiConfig,
applyMinimaxApiProviderConfig,
applyOpencodeZenConfig,
@@ -511,6 +512,41 @@ describe("applyOpenrouterProviderConfig", () => {
});
});
describe("applyLitellmProviderConfig", () => {
// Covers #12823: a user-configured proxy baseUrl and stored apiKey must survive
// re-running provider setup; only the `api` field is normalized and the default
// model is appended after any custom models.
it("preserves existing baseUrl and api key while adding the default model", () => {
const cfg = applyLitellmProviderConfig({
models: {
providers: {
litellm: {
// Custom proxy URL that must not be clobbered by the localhost default.
baseUrl: "https://litellm.example/v1",
// Padded on purpose: setup is expected to trim stored keys.
apiKey: " old-key ",
// Wrong api kind on purpose: setup is expected to force openai-completions.
api: "anthropic-messages",
models: [
{
id: "custom-model",
name: "Custom",
reasoning: false,
input: ["text"],
cost: { input: 1, output: 2, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1000,
maxTokens: 100,
},
],
},
},
},
});
// Custom baseUrl preserved; api normalized; key trimmed.
expect(cfg.models?.providers?.litellm?.baseUrl).toBe("https://litellm.example/v1");
expect(cfg.models?.providers?.litellm?.api).toBe("openai-completions");
expect(cfg.models?.providers?.litellm?.apiKey).toBe("old-key");
// Existing custom model kept first; default model appended, not substituted.
expect(cfg.models?.providers?.litellm?.models.map((m) => m.id)).toEqual([
"custom-model",
"claude-opus-4-6",
]);
});
});
describe("applyOpenrouterConfig", () => {
it("sets correct primary model", () => {
const cfg = applyOpenrouterConfig({});