From 029b77c85bde8809c147218dde5bd67e091e446f Mon Sep 17 00:00:00 2001 From: ENCHIGO <38551565+ENCHIGO@users.noreply.github.com> Date: Thu, 12 Feb 2026 03:48:45 +0800 Subject: [PATCH] onboard: support custom provider in non-interactive flow (#14223) Merged via /review-pr -> /prepare-pr -> /merge-pr. Prepared head SHA: 5b98d6514e73f7ee934a350f3b38619c70f49aed Co-authored-by: ENCHIGO <38551565+ENCHIGO@users.noreply.github.com> Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com> Reviewed-by: @gumadeiras --- docs/cli/index.md | 7 +- docs/cli/onboard.md | 13 + docs/start/wizard-cli-automation.md | 17 + docs/start/wizard-cli-reference.md | 12 + src/cli/program.smoke.test.ts | 36 ++ src/cli/program/register.onboard.ts | 15 +- src/commands/onboard-custom.test.ts | 78 +++- src/commands/onboard-custom.ts | 350 ++++++++++++++---- ...oard-non-interactive.provider-auth.test.ts | 240 ++++++++++++ .../onboard-non-interactive/api-keys.ts | 14 + .../local/auth-choice-inference.ts | 29 +- .../local/auth-choice.ts | 65 ++++ src/commands/onboard-types.ts | 5 + 13 files changed, 791 insertions(+), 90 deletions(-) diff --git a/docs/cli/index.md b/docs/cli/index.md index 918d92ad34..65448f4ee1 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -303,7 +303,7 @@ Options: - `--non-interactive` - `--mode ` - `--flow ` (manual is an alias for advanced) -- `--auth-choice ` +- `--auth-choice ` - `--token-provider ` (non-interactive; used with `--auth-choice token`) - `--token ` (non-interactive; used with `--auth-choice token`) - `--token-profile-id ` (non-interactive; default: `:manual`) @@ -318,6 +318,11 @@ Options: - `--zai-api-key ` - `--minimax-api-key ` - `--opencode-zen-api-key ` +- `--custom-base-url ` (non-interactive; used with `--auth-choice custom-api-key`) +- `--custom-model-id ` (non-interactive; used with `--auth-choice custom-api-key`) +- `--custom-api-key ` (non-interactive; optional; used with `--auth-choice custom-api-key`; falls back to `CUSTOM_API_KEY` when omitted) +- `--custom-provider-id ` (non-interactive; optional custom provider id) +- `--custom-compatibility ` (non-interactive; optional; default `openai`) - `--gateway-port ` - `--gateway-bind ` - `--gateway-auth ` diff --git a/docs/cli/onboard.md b/docs/cli/onboard.md index e32fd6ae67..2b4c97b1cf 100644 --- a/docs/cli/onboard.md +++ b/docs/cli/onboard.md @@ -26,6 +26,19 @@ openclaw onboard --flow manual openclaw onboard --mode remote --remote-url ws://gateway-host:18789 ``` +Non-interactive custom provider: + +```bash +openclaw onboard --non-interactive \ + --auth-choice custom-api-key \ + --custom-base-url "https://llm.example.com/v1" \ + --custom-model-id "foo-large" \ + --custom-api-key "$CUSTOM_API_KEY" \ + --custom-compatibility openai +``` + +`--custom-api-key` is optional in non-interactive mode. If omitted, onboarding checks `CUSTOM_API_KEY`. + Flow notes: - `quickstart`: minimal prompts, auto-generates a gateway token. diff --git a/docs/start/wizard-cli-automation.md b/docs/start/wizard-cli-automation.md index 081c0a1954..1eb85c36a1 100644 --- a/docs/start/wizard-cli-automation.md +++ b/docs/start/wizard-cli-automation.md @@ -106,6 +106,23 @@ Add `--json` for a machine-readable summary. 
--gateway-bind loopback ``` + + ```bash + openclaw onboard --non-interactive \ + --mode local \ + --auth-choice custom-api-key \ + --custom-base-url "https://llm.example.com/v1" \ + --custom-model-id "foo-large" \ + --custom-api-key "$CUSTOM_API_KEY" \ + --custom-provider-id "my-custom" \ + --custom-compatibility anthropic \ + --gateway-port 18789 \ + --gateway-bind loopback + ``` + + `--custom-api-key` is optional. If omitted, onboarding checks `CUSTOM_API_KEY`. + + ## Add another agent diff --git a/docs/start/wizard-cli-reference.md b/docs/start/wizard-cli-reference.md index ccfdf4d17a..b0b31de8c6 100644 --- a/docs/start/wizard-cli-reference.md +++ b/docs/start/wizard-cli-reference.md @@ -175,6 +175,18 @@ What you set: Moonshot (Kimi K2) and Kimi Coding configs are auto-written. More detail: [Moonshot AI (Kimi + Kimi Coding)](/providers/moonshot). + + Works with OpenAI-compatible and Anthropic-compatible endpoints. + + Non-interactive flags: + - `--auth-choice custom-api-key` + - `--custom-base-url` + - `--custom-model-id` + - `--custom-api-key` (optional; falls back to `CUSTOM_API_KEY`) + - `--custom-provider-id` (optional) + - `--custom-compatibility ` (optional; default `openai`) + + Leaves auth unconfigured. diff --git a/src/cli/program.smoke.test.ts b/src/cli/program.smoke.test.ts index 66fefef84c..97e71d631f 100644 --- a/src/cli/program.smoke.test.ts +++ b/src/cli/program.smoke.test.ts @@ -228,6 +228,42 @@ describe("cli program (smoke)", () => { } }); + it("passes custom provider flags to onboard", async () => { + const program = buildProgram(); + await program.parseAsync( + [ + "onboard", + "--non-interactive", + "--auth-choice", + "custom-api-key", + "--custom-base-url", + "https://llm.example.com/v1", + "--custom-api-key", + "sk-custom-test", + "--custom-model-id", + "foo-large", + "--custom-provider-id", + "my-custom", + "--custom-compatibility", + "anthropic", + ], + { from: "user" }, + ); + + expect(onboardCommand).toHaveBeenCalledWith( + expect.objectContaining({ + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://llm.example.com/v1", + customApiKey: "sk-custom-test", + customModelId: "foo-large", + customProviderId: "my-custom", + customCompatibility: "anthropic", + }), + runtime, + ); + }); + it("runs channels login", async () => { const program = buildProgram(); await program.parseAsync(["channels", "login", "--account", "work"], { diff --git a/src/cli/program/register.onboard.ts b/src/cli/program/register.onboard.ts index 33c276f562..df8d241830 100644 --- a/src/cli/program/register.onboard.ts +++ b/src/cli/program/register.onboard.ts @@ -58,7 +58,7 @@ export function registerOnboardCommand(program: Command) { .option("--mode ", "Wizard mode: local|remote") .option( "--auth-choice ", - "Auth: setup-token|token|chutes|openai-codex|openai-api-key|xai-api-key|qianfan-api-key|openrouter-api-key|litellm-api-key|ai-gateway-api-key|cloudflare-ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip|together-api-key", + "Auth: 
setup-token|token|chutes|openai-codex|openai-api-key|xai-api-key|qianfan-api-key|openrouter-api-key|litellm-api-key|ai-gateway-api-key|cloudflare-ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|custom-api-key|skip|together-api-key", ) .option( "--token-provider ", @@ -90,6 +90,14 @@ export function registerOnboardCommand(program: Command) { .option("--xai-api-key ", "xAI API key") .option("--litellm-api-key ", "LiteLLM API key") .option("--qianfan-api-key ", "QIANFAN API key") + .option("--custom-base-url ", "Custom provider base URL") + .option("--custom-api-key ", "Custom provider API key (optional)") + .option("--custom-model-id ", "Custom provider model ID") + .option("--custom-provider-id ", "Custom provider ID (optional; auto-derived by default)") + .option( + "--custom-compatibility ", + "Custom provider API compatibility: openai|anthropic (default: openai)", + ) .option("--gateway-port ", "Gateway port") .option("--gateway-bind ", "Gateway bind: loopback|tailnet|lan|auto|custom") .option("--gateway-auth ", "Gateway auth: token|password") @@ -148,6 +156,11 @@ export function registerOnboardCommand(program: Command) { opencodeZenApiKey: opts.opencodeZenApiKey as string | undefined, xaiApiKey: opts.xaiApiKey as string | undefined, litellmApiKey: opts.litellmApiKey as string | undefined, + customBaseUrl: opts.customBaseUrl as string | undefined, + customApiKey: opts.customApiKey as string | undefined, + customModelId: opts.customModelId as string | undefined, + customProviderId: opts.customProviderId as string | undefined, + customCompatibility: opts.customCompatibility as "openai" | "anthropic" | undefined, gatewayPort: typeof gatewayPort === "number" && Number.isFinite(gatewayPort) ? 
gatewayPort diff --git a/src/commands/onboard-custom.test.ts b/src/commands/onboard-custom.test.ts index 16c07c287c..1e59512536 100644 --- a/src/commands/onboard-custom.test.ts +++ b/src/commands/onboard-custom.test.ts @@ -1,6 +1,10 @@ import { afterEach, describe, expect, it, vi } from "vitest"; import { defaultRuntime } from "../runtime.js"; -import { promptCustomApiConfig } from "./onboard-custom.js"; +import { + applyCustomApiConfig, + parseNonInteractiveCustomApiFlags, + promptCustomApiConfig, +} from "./onboard-custom.js"; // Mock dependencies vi.mock("./model-picker.js", () => ({ @@ -268,3 +272,75 @@ describe("promptCustomApiConfig", () => { expect(prompter.text).toHaveBeenCalledTimes(6); }); }); + +describe("applyCustomApiConfig", () => { + it("rejects invalid compatibility values at runtime", () => { + expect(() => + applyCustomApiConfig({ + config: {}, + baseUrl: "https://llm.example.com/v1", + modelId: "foo-large", + compatibility: "invalid" as unknown as "openai", + }), + ).toThrow('Custom provider compatibility must be "openai" or "anthropic".'); + }); + + it("rejects explicit provider ids that normalize to empty", () => { + expect(() => + applyCustomApiConfig({ + config: {}, + baseUrl: "https://llm.example.com/v1", + modelId: "foo-large", + compatibility: "openai", + providerId: "!!!", + }), + ).toThrow("Custom provider ID must include letters, numbers, or hyphens."); + }); +}); + +describe("parseNonInteractiveCustomApiFlags", () => { + it("parses required flags and defaults compatibility to openai", () => { + const result = parseNonInteractiveCustomApiFlags({ + baseUrl: " https://llm.example.com/v1 ", + modelId: " foo-large ", + apiKey: " custom-test-key ", + providerId: " my-custom ", + }); + + expect(result).toEqual({ + baseUrl: "https://llm.example.com/v1", + modelId: "foo-large", + compatibility: "openai", + apiKey: "custom-test-key", + providerId: "my-custom", + }); + }); + + it("rejects missing required flags", () => { + expect(() => + parseNonInteractiveCustomApiFlags({ + baseUrl: "https://llm.example.com/v1", + }), + ).toThrow('Auth choice "custom-api-key" requires a base URL and model ID.'); + }); + + it("rejects invalid compatibility values", () => { + expect(() => + parseNonInteractiveCustomApiFlags({ + baseUrl: "https://llm.example.com/v1", + modelId: "foo-large", + compatibility: "xmlrpc", + }), + ).toThrow('Invalid --custom-compatibility (use "openai" or "anthropic").'); + }); + + it("rejects invalid explicit provider ids", () => { + expect(() => + parseNonInteractiveCustomApiFlags({ + baseUrl: "https://llm.example.com/v1", + modelId: "foo-large", + providerId: "!!!", + }), + ).toThrow("Custom provider ID must include letters, numbers, or hyphens."); + }); +}); diff --git a/src/commands/onboard-custom.ts b/src/commands/onboard-custom.ts index 6e82ff71fd..1beaf1c071 100644 --- a/src/commands/onboard-custom.ts +++ b/src/commands/onboard-custom.ts @@ -13,31 +13,84 @@ const DEFAULT_CONTEXT_WINDOW = 4096; const DEFAULT_MAX_TOKENS = 4096; const VERIFY_TIMEOUT_MS = 10000; -type CustomApiCompatibility = "openai" | "anthropic"; +export type CustomApiCompatibility = "openai" | "anthropic"; type CustomApiCompatibilityChoice = CustomApiCompatibility | "unknown"; -type CustomApiResult = { +export type CustomApiResult = { config: OpenClawConfig; providerId?: string; modelId?: string; + providerIdRenamedFrom?: string; +}; + +export type ApplyCustomApiConfigParams = { + config: OpenClawConfig; + baseUrl: string; + modelId: string; + compatibility: CustomApiCompatibility; + 
apiKey?: string; + providerId?: string; + alias?: string; +}; + +export type ParseNonInteractiveCustomApiFlagsParams = { + baseUrl?: string; + modelId?: string; + compatibility?: string; + apiKey?: string; + providerId?: string; +}; + +export type ParsedNonInteractiveCustomApiFlags = { + baseUrl: string; + modelId: string; + compatibility: CustomApiCompatibility; + apiKey?: string; + providerId?: string; +}; + +export type CustomApiErrorCode = + | "missing_required" + | "invalid_compatibility" + | "invalid_base_url" + | "invalid_model_id" + | "invalid_provider_id" + | "invalid_alias"; + +export class CustomApiError extends Error { + readonly code: CustomApiErrorCode; + + constructor(code: CustomApiErrorCode, message: string) { + super(message); + this.name = "CustomApiError"; + this.code = code; + } +} + +export type ResolveCustomProviderIdParams = { + config: OpenClawConfig; + baseUrl: string; + providerId?: string; +}; + +export type ResolvedCustomProviderId = { + providerId: string; + providerIdRenamedFrom?: string; }; const COMPATIBILITY_OPTIONS: Array<{ value: CustomApiCompatibilityChoice; label: string; hint: string; - api?: "openai-completions" | "anthropic-messages"; }> = [ { value: "openai", label: "OpenAI-compatible", hint: "Uses /chat/completions", - api: "openai-completions", }, { value: "anthropic", label: "Anthropic-compatible", hint: "Uses /messages", - api: "anthropic-messages", }, { value: "unknown", @@ -246,6 +299,191 @@ async function promptBaseUrlAndKey(params: { return { baseUrl: baseUrlInput.trim(), apiKey: apiKeyInput.trim() }; } +function resolveProviderApi( + compatibility: CustomApiCompatibility, +): "openai-completions" | "anthropic-messages" { + return compatibility === "anthropic" ? "anthropic-messages" : "openai-completions"; +} + +function parseCustomApiCompatibility(raw?: string): CustomApiCompatibility { + const compatibilityRaw = raw?.trim().toLowerCase(); + if (!compatibilityRaw) { + return "openai"; + } + if (compatibilityRaw !== "openai" && compatibilityRaw !== "anthropic") { + throw new CustomApiError( + "invalid_compatibility", + 'Invalid --custom-compatibility (use "openai" or "anthropic").', + ); + } + return compatibilityRaw; +} + +export function resolveCustomProviderId( + params: ResolveCustomProviderIdParams, +): ResolvedCustomProviderId { + const providers = params.config.models?.providers ?? {}; + const baseUrl = params.baseUrl.trim(); + const explicitProviderId = params.providerId?.trim(); + if (explicitProviderId && !normalizeEndpointId(explicitProviderId)) { + throw new CustomApiError( + "invalid_provider_id", + "Custom provider ID must include letters, numbers, or hyphens.", + ); + } + const requestedProviderId = explicitProviderId || buildEndpointIdFromUrl(baseUrl); + const providerIdResult = resolveUniqueEndpointId({ + requestedId: requestedProviderId, + baseUrl, + providers, + }); + + return { + providerId: providerIdResult.providerId, + ...(providerIdResult.renamed + ? { + providerIdRenamedFrom: normalizeEndpointId(requestedProviderId) || "custom", + } + : {}), + }; +} + +export function parseNonInteractiveCustomApiFlags( + params: ParseNonInteractiveCustomApiFlagsParams, +): ParsedNonInteractiveCustomApiFlags { + const baseUrl = params.baseUrl?.trim() ?? ""; + const modelId = params.modelId?.trim() ?? 
""; + if (!baseUrl || !modelId) { + throw new CustomApiError( + "missing_required", + [ + 'Auth choice "custom-api-key" requires a base URL and model ID.', + "Use --custom-base-url and --custom-model-id.", + ].join("\n"), + ); + } + + const apiKey = params.apiKey?.trim(); + const providerId = params.providerId?.trim(); + if (providerId && !normalizeEndpointId(providerId)) { + throw new CustomApiError( + "invalid_provider_id", + "Custom provider ID must include letters, numbers, or hyphens.", + ); + } + return { + baseUrl, + modelId, + compatibility: parseCustomApiCompatibility(params.compatibility), + ...(apiKey ? { apiKey } : {}), + ...(providerId ? { providerId } : {}), + }; +} + +export function applyCustomApiConfig(params: ApplyCustomApiConfigParams): CustomApiResult { + const baseUrl = params.baseUrl.trim(); + try { + new URL(baseUrl); + } catch { + throw new CustomApiError("invalid_base_url", "Custom provider base URL must be a valid URL."); + } + + if (params.compatibility !== "openai" && params.compatibility !== "anthropic") { + throw new CustomApiError( + "invalid_compatibility", + 'Custom provider compatibility must be "openai" or "anthropic".', + ); + } + + const modelId = params.modelId.trim(); + if (!modelId) { + throw new CustomApiError("invalid_model_id", "Custom provider model ID is required."); + } + + const providerIdResult = resolveCustomProviderId({ + config: params.config, + baseUrl, + providerId: params.providerId, + }); + const providerId = providerIdResult.providerId; + const providers = params.config.models?.providers ?? {}; + + const modelRef = modelKey(providerId, modelId); + const alias = params.alias?.trim() ?? ""; + const aliasError = resolveAliasError({ + raw: alias, + cfg: params.config, + modelRef, + }); + if (aliasError) { + throw new CustomApiError("invalid_alias", aliasError); + } + + const existingProvider = providers[providerId]; + const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : []; + const hasModel = existingModels.some((model) => model.id === modelId); + const nextModel = { + id: modelId, + name: `${modelId} (Custom Provider)`, + contextWindow: DEFAULT_CONTEXT_WINDOW, + maxTokens: DEFAULT_MAX_TOKENS, + input: ["text"] as ["text"], + cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }, + reasoning: false, + }; + const mergedModels = hasModel ? existingModels : [...existingModels, nextModel]; + const { apiKey: existingApiKey, ...existingProviderRest } = existingProvider ?? {}; + const normalizedApiKey = + params.apiKey?.trim() || (existingApiKey ? existingApiKey.trim() : undefined); + + let config: OpenClawConfig = { + ...params.config, + models: { + ...params.config.models, + mode: params.config.models?.mode ?? "merge", + providers: { + ...providers, + [providerId]: { + ...existingProviderRest, + baseUrl, + api: resolveProviderApi(params.compatibility), + ...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}), + models: mergedModels.length > 0 ? mergedModels : [nextModel], + }, + }, + }, + }; + + config = applyPrimaryModel(config, modelRef); + if (alias) { + config = { + ...config, + agents: { + ...config.agents, + defaults: { + ...config.agents?.defaults, + models: { + ...config.agents?.defaults?.models, + [modelRef]: { + ...config.agents?.defaults?.models?.[modelRef], + alias, + }, + }, + }, + }, + }; + } + + return { + config, + providerId, + modelId, + ...(providerIdResult.providerIdRenamedFrom + ? 
{ providerIdRenamedFrom: providerIdResult.providerIdRenamedFrom } + : {}), + }; +} + export async function promptCustomApiConfig(params: { prompter: WizardPrompter; runtime: RuntimeEnv; @@ -276,9 +514,6 @@ export async function promptCustomApiConfig(params: { let compatibility: CustomApiCompatibility | null = compatibilityChoice === "unknown" ? null : compatibilityChoice; - let providerApi = - COMPATIBILITY_OPTIONS.find((entry) => entry.value === compatibility)?.api ?? - "openai-completions"; while (true) { let verifiedFromProbe = false; @@ -288,14 +523,12 @@ export async function promptCustomApiConfig(params: { if (openaiProbe.ok) { probeSpinner.stop("Detected OpenAI-compatible endpoint."); compatibility = "openai"; - providerApi = "openai-completions"; verifiedFromProbe = true; } else { const anthropicProbe = await requestAnthropicVerification({ baseUrl, apiKey, modelId }); if (anthropicProbe.ok) { probeSpinner.stop("Detected Anthropic-compatible endpoint."); compatibility = "anthropic"; - providerApi = "anthropic-messages"; verifiedFromProbe = true; } else { probeSpinner.stop("Could not detect endpoint type."); @@ -395,82 +628,39 @@ export async function promptCustomApiConfig(params: { return undefined; }, }); - const providerIdResult = resolveUniqueEndpointId({ - requestedId: providerIdInput, - baseUrl, - providers, - }); - if (providerIdResult.renamed) { - await prompter.note( - `Endpoint ID "${providerIdInput}" already exists for a different base URL. Using "${providerIdResult.providerId}".`, - "Endpoint ID", - ); - } - const providerId = providerIdResult.providerId; - - const modelRef = modelKey(providerId, modelId); const aliasInput = await prompter.text({ message: "Model alias (optional)", placeholder: "e.g. local, ollama", initialValue: "", - validate: (value) => resolveAliasError({ raw: value, cfg: config, modelRef }), - }); - const alias = aliasInput.trim(); - - const existingProvider = providers[providerId]; - const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : []; - const hasModel = existingModels.some((model) => model.id === modelId); - const nextModel = { - id: modelId, - name: `${modelId} (Custom Provider)`, - contextWindow: DEFAULT_CONTEXT_WINDOW, - maxTokens: DEFAULT_MAX_TOKENS, - input: ["text"] as ["text"], - cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }, - reasoning: false, - }; - const mergedModels = hasModel ? existingModels : [...existingModels, nextModel]; - const { apiKey: existingApiKey, ...existingProviderRest } = existingProvider ?? {}; - const normalizedApiKey = apiKey.trim() || (existingApiKey ? existingApiKey.trim() : undefined); - - let newConfig: OpenClawConfig = { - ...config, - models: { - ...config.models, - mode: config.models?.mode ?? "merge", - providers: { - ...providers, - [providerId]: { - ...existingProviderRest, - baseUrl, - api: providerApi, - ...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}), - models: mergedModels.length > 0 ? mergedModels : [nextModel], - }, - }, + validate: (value) => { + const requestedId = normalizeEndpointId(providerIdInput) || "custom"; + const providerIdResult = resolveUniqueEndpointId({ + requestedId, + baseUrl, + providers, + }); + const modelRef = modelKey(providerIdResult.providerId, modelId); + return resolveAliasError({ raw: value, cfg: config, modelRef }); }, - }; + }); + const resolvedCompatibility = compatibility ?? 
"openai"; + const result = applyCustomApiConfig({ + config, + baseUrl, + modelId, + compatibility: resolvedCompatibility, + apiKey, + providerId: providerIdInput, + alias: aliasInput, + }); - newConfig = applyPrimaryModel(newConfig, modelRef); - if (alias) { - newConfig = { - ...newConfig, - agents: { - ...newConfig.agents, - defaults: { - ...newConfig.agents?.defaults, - models: { - ...newConfig.agents?.defaults?.models, - [modelRef]: { - ...newConfig.agents?.defaults?.models?.[modelRef], - alias, - }, - }, - }, - }, - }; + if (result.providerIdRenamedFrom && result.providerId) { + await prompter.note( + `Endpoint ID "${result.providerIdRenamedFrom}" already exists for a different base URL. Using "${result.providerId}".`, + "Endpoint ID", + ); } - runtime.log(`Configured custom provider: ${providerId}/${modelId}`); - return { config: newConfig, providerId, modelId }; + runtime.log(`Configured custom provider: ${result.providerId}/${result.modelId}`); + return result; } diff --git a/src/commands/onboard-non-interactive.provider-auth.test.ts b/src/commands/onboard-non-interactive.provider-auth.test.ts index d3edb1891d..246c65c0ab 100644 --- a/src/commands/onboard-non-interactive.provider-auth.test.ts +++ b/src/commands/onboard-non-interactive.provider-auth.test.ts @@ -20,6 +20,7 @@ type EnvSnapshot = { skipCanvas: string | undefined; token: string | undefined; password: string | undefined; + customApiKey: string | undefined; disableConfigCache: string | undefined; }; @@ -39,6 +40,7 @@ function captureEnv(): EnvSnapshot { skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST, token: process.env.OPENCLAW_GATEWAY_TOKEN, password: process.env.OPENCLAW_GATEWAY_PASSWORD, + customApiKey: process.env.CUSTOM_API_KEY, disableConfigCache: process.env.OPENCLAW_DISABLE_CONFIG_CACHE, }; } @@ -61,6 +63,7 @@ function restoreEnv(prev: EnvSnapshot): void { restoreEnvVar("OPENCLAW_SKIP_CANVAS_HOST", prev.skipCanvas); restoreEnvVar("OPENCLAW_GATEWAY_TOKEN", prev.token); restoreEnvVar("OPENCLAW_GATEWAY_PASSWORD", prev.password); + restoreEnvVar("CUSTOM_API_KEY", prev.customApiKey); restoreEnvVar("OPENCLAW_DISABLE_CONFIG_CACHE", prev.disableConfigCache); } @@ -77,6 +80,7 @@ async function withOnboardEnv( process.env.OPENCLAW_DISABLE_CONFIG_CACHE = "1"; delete process.env.OPENCLAW_GATEWAY_TOKEN; delete process.env.OPENCLAW_GATEWAY_PASSWORD; + delete process.env.CUSTOM_API_KEY; const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), prefix)); const configPath = path.join(tempHome, "openclaw.json"); @@ -324,4 +328,240 @@ describe("onboard (non-interactive): provider auth", () => { }); }); }, 60_000); + + it("configures a custom provider from non-interactive flags", async () => { + await withOnboardEnv("openclaw-onboard-custom-provider-", async ({ configPath, runtime }) => { + await runNonInteractive( + { + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://llm.example.com/v1", + customApiKey: "custom-test-key", + customModelId: "foo-large", + customCompatibility: "anthropic", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ); + + const cfg = await readJsonFile<{ + models?: { + providers?: Record< + string, + { + baseUrl?: string; + api?: string; + apiKey?: string; + models?: Array<{ id?: string }>; + } + >; + }; + agents?: { defaults?: { model?: { primary?: string } } }; + }>(configPath); + + const provider = cfg.models?.providers?.["custom-llm-example-com"]; + expect(provider?.baseUrl).toBe("https://llm.example.com/v1"); + 
expect(provider?.api).toBe("anthropic-messages"); + expect(provider?.apiKey).toBe("custom-test-key"); + expect(provider?.models?.some((model) => model.id === "foo-large")).toBe(true); + expect(cfg.agents?.defaults?.model?.primary).toBe("custom-llm-example-com/foo-large"); + }); + }, 60_000); + + it("infers custom provider auth choice from custom flags", async () => { + await withOnboardEnv( + "openclaw-onboard-custom-provider-infer-", + async ({ configPath, runtime }) => { + await runNonInteractive( + { + nonInteractive: true, + customBaseUrl: "https://models.custom.local/v1", + customModelId: "local-large", + customApiKey: "custom-test-key", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ); + + const cfg = await readJsonFile<{ + models?: { + providers?: Record< + string, + { + baseUrl?: string; + api?: string; + } + >; + }; + agents?: { defaults?: { model?: { primary?: string } } }; + }>(configPath); + + expect(cfg.models?.providers?.["custom-models-custom-local"]?.baseUrl).toBe( + "https://models.custom.local/v1", + ); + expect(cfg.models?.providers?.["custom-models-custom-local"]?.api).toBe( + "openai-completions", + ); + expect(cfg.agents?.defaults?.model?.primary).toBe("custom-models-custom-local/local-large"); + }, + ); + }, 60_000); + + it("uses CUSTOM_API_KEY env fallback for non-interactive custom provider auth", async () => { + await withOnboardEnv( + "openclaw-onboard-custom-provider-env-fallback-", + async ({ configPath, runtime }) => { + process.env.CUSTOM_API_KEY = "custom-env-key"; + + await runNonInteractive( + { + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://models.custom.local/v1", + customModelId: "local-large", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ); + + const cfg = await readJsonFile<{ + models?: { + providers?: Record< + string, + { + apiKey?: string; + } + >; + }; + }>(configPath); + + expect(cfg.models?.providers?.["custom-models-custom-local"]?.apiKey).toBe( + "custom-env-key", + ); + }, + ); + }, 60_000); + + it("uses matching profile fallback for non-interactive custom provider auth", async () => { + await withOnboardEnv( + "openclaw-onboard-custom-provider-profile-fallback-", + async ({ configPath, runtime }) => { + const { upsertAuthProfile } = await import("../agents/auth-profiles.js"); + upsertAuthProfile({ + profileId: "custom-models-custom-local:default", + credential: { + type: "api_key", + provider: "custom-models-custom-local", + key: "custom-profile-key", + }, + }); + + await runNonInteractive( + { + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://models.custom.local/v1", + customModelId: "local-large", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ); + + const cfg = await readJsonFile<{ + models?: { + providers?: Record< + string, + { + apiKey?: string; + } + >; + }; + }>(configPath); + + expect(cfg.models?.providers?.["custom-models-custom-local"]?.apiKey).toBe( + "custom-profile-key", + ); + }, + ); + }, 60_000); + + it("fails custom provider auth when compatibility is invalid", async () => { + await withOnboardEnv( + "openclaw-onboard-custom-provider-invalid-compat-", + async ({ runtime }) => { + await expect( + runNonInteractive( + { + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://models.custom.local/v1", + customModelId: "local-large", + customCompatibility: "xmlrpc", + skipHealth: true, + 
skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ), + ).rejects.toThrow('Invalid --custom-compatibility (use "openai" or "anthropic").'); + }, + ); + }, 60_000); + + it("fails custom provider auth when explicit provider id is invalid", async () => { + await withOnboardEnv("openclaw-onboard-custom-provider-invalid-id-", async ({ runtime }) => { + await expect( + runNonInteractive( + { + nonInteractive: true, + authChoice: "custom-api-key", + customBaseUrl: "https://models.custom.local/v1", + customModelId: "local-large", + customProviderId: "!!!", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ), + ).rejects.toThrow( + "Invalid custom provider config: Custom provider ID must include letters, numbers, or hyphens.", + ); + }); + }, 60_000); + + it("fails inferred custom auth when required flags are incomplete", async () => { + await withOnboardEnv( + "openclaw-onboard-custom-provider-missing-required-", + async ({ runtime }) => { + await expect( + runNonInteractive( + { + nonInteractive: true, + customApiKey: "custom-test-key", + skipHealth: true, + skipChannels: true, + skipSkills: true, + json: true, + }, + runtime, + ), + ).rejects.toThrow('Auth choice "custom-api-key" requires a base URL and model ID.'); + }, + ); + }, 60_000); }); diff --git a/src/commands/onboard-non-interactive/api-keys.ts b/src/commands/onboard-non-interactive/api-keys.ts index ad4580e889..fc03805f2a 100644 --- a/src/commands/onboard-non-interactive/api-keys.ts +++ b/src/commands/onboard-non-interactive/api-keys.ts @@ -45,9 +45,11 @@ export async function resolveNonInteractiveApiKey(params: { flagValue?: string; flagName: string; envVar: string; + envVarName?: string; runtime: RuntimeEnv; agentDir?: string; allowProfile?: boolean; + required?: boolean; }): Promise<{ key: string; source: NonInteractiveApiKeySource } | null> { const flagKey = normalizeOptionalSecretInput(params.flagValue); if (flagKey) { @@ -59,6 +61,14 @@ export async function resolveNonInteractiveApiKey(params: { return { key: envResolved.apiKey, source: "env" }; } + const explicitEnvVar = params.envVarName?.trim(); + if (explicitEnvVar) { + const explicitEnvKey = normalizeOptionalSecretInput(process.env[explicitEnvVar]); + if (explicitEnvKey) { + return { key: explicitEnvKey, source: "env" }; + } + } + if (params.allowProfile ?? true) { const profileKey = await resolveApiKeyFromProfiles({ provider: params.provider, @@ -70,6 +80,10 @@ export async function resolveNonInteractiveApiKey(params: { } } + if (params.required === false) { + return null; + } + const profileHint = params.allowProfile === false ? "" : `, or existing ${params.provider} API-key profile`; params.runtime.error(`Missing ${params.flagName} (or ${params.envVar} in env${profileHint}).`); diff --git a/src/commands/onboard-non-interactive/local/auth-choice-inference.ts b/src/commands/onboard-non-interactive/local/auth-choice-inference.ts index f3a7998531..610ae9b99d 100644 --- a/src/commands/onboard-non-interactive/local/auth-choice-inference.ts +++ b/src/commands/onboard-non-interactive/local/auth-choice-inference.ts @@ -24,6 +24,9 @@ type AuthChoiceFlagOptions = Pick< | "opencodeZenApiKey" | "xaiApiKey" | "litellmApiKey" + | "customBaseUrl" + | "customModelId" + | "customApiKey" >; const AUTH_CHOICE_FLAG_MAP = [ @@ -54,15 +57,27 @@ export type AuthChoiceInference = { matches: AuthChoiceFlag[]; }; +function hasStringValue(value: unknown): boolean { + return typeof value === "string" ? 
value.trim().length > 0 : Boolean(value); +} + // Infer auth choice from explicit provider API key flags. export function inferAuthChoiceFromFlags(opts: OnboardOptions): AuthChoiceInference { - const matches = AUTH_CHOICE_FLAG_MAP.filter(({ flag }) => { - const value = opts[flag]; - if (typeof value === "string") { - return value.trim().length > 0; - } - return Boolean(value); - }); + const matches: AuthChoiceFlag[] = AUTH_CHOICE_FLAG_MAP.filter(({ flag }) => + hasStringValue(opts[flag]), + ); + + if ( + hasStringValue(opts.customBaseUrl) || + hasStringValue(opts.customModelId) || + hasStringValue(opts.customApiKey) + ) { + matches.push({ + flag: "customBaseUrl", + authChoice: "custom-api-key", + label: "--custom-base-url/--custom-model-id/--custom-api-key", + }); + } return { choice: matches[0]?.authChoice, diff --git a/src/commands/onboard-non-interactive/local/auth-choice.ts b/src/commands/onboard-non-interactive/local/auth-choice.ts index b26673bb28..a2744b56cd 100644 --- a/src/commands/onboard-non-interactive/local/auth-choice.ts +++ b/src/commands/onboard-non-interactive/local/auth-choice.ts @@ -46,6 +46,12 @@ import { setXiaomiApiKey, setZaiApiKey, } from "../../onboard-auth.js"; +import { + applyCustomApiConfig, + CustomApiError, + parseNonInteractiveCustomApiFlags, + resolveCustomProviderId, +} from "../../onboard-custom.js"; import { applyOpenAIConfig } from "../../openai-model-default.js"; import { resolveNonInteractiveApiKey } from "../api-keys.js"; @@ -594,6 +600,65 @@ export async function applyNonInteractiveAuthChoice(params: { return applyTogetherConfig(nextConfig); } + if (authChoice === "custom-api-key") { + try { + const customAuth = parseNonInteractiveCustomApiFlags({ + baseUrl: opts.customBaseUrl, + modelId: opts.customModelId, + compatibility: opts.customCompatibility, + apiKey: opts.customApiKey, + providerId: opts.customProviderId, + }); + const resolvedProviderId = resolveCustomProviderId({ + config: nextConfig, + baseUrl: customAuth.baseUrl, + providerId: customAuth.providerId, + }); + const resolvedCustomApiKey = await resolveNonInteractiveApiKey({ + provider: resolvedProviderId.providerId, + cfg: baseConfig, + flagValue: customAuth.apiKey, + flagName: "--custom-api-key", + envVar: "CUSTOM_API_KEY", + envVarName: "CUSTOM_API_KEY", + runtime, + required: false, + }); + const result = applyCustomApiConfig({ + config: nextConfig, + baseUrl: customAuth.baseUrl, + modelId: customAuth.modelId, + compatibility: customAuth.compatibility, + apiKey: resolvedCustomApiKey?.key, + providerId: customAuth.providerId, + }); + if (result.providerIdRenamedFrom && result.providerId) { + runtime.log( + `Custom provider ID "${result.providerIdRenamedFrom}" already exists for a different base URL. Using "${result.providerId}".`, + ); + } + return result.config; + } catch (err) { + if (err instanceof CustomApiError) { + switch (err.code) { + case "missing_required": + case "invalid_compatibility": + runtime.error(err.message); + break; + default: + runtime.error(`Invalid custom provider config: ${err.message}`); + break; + } + runtime.exit(1); + return null; + } + const reason = err instanceof Error ? 
err.message : String(err); + runtime.error(`Invalid custom provider config: ${reason}`); + runtime.exit(1); + return null; + } + } + if ( authChoice === "oauth" || authChoice === "chutes" || diff --git a/src/commands/onboard-types.ts b/src/commands/onboard-types.ts index ec067cd6a4..70102902e1 100644 --- a/src/commands/onboard-types.ts +++ b/src/commands/onboard-types.ts @@ -107,6 +107,11 @@ export type OnboardOptions = { opencodeZenApiKey?: string; xaiApiKey?: string; qianfanApiKey?: string; + customBaseUrl?: string; + customApiKey?: string; + customModelId?: string; + customProviderId?: string; + customCompatibility?: "openai" | "anthropic"; gatewayPort?: number; gatewayBind?: GatewayBind; gatewayAuth?: GatewayAuthChoice;
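
For quick reference, a minimal sketch of the non-interactive flow this patch enables, assuming the illustrative endpoint, model, and key values used in the patch's own docs and tests (`llm.example.com`, `foo-large`, `CUSTOM_API_KEY`); it is a usage sketch, not a prescribed invocation:

```bash
# Sketch: non-interactive onboarding against a custom OpenAI-compatible endpoint,
# relying on the CUSTOM_API_KEY env fallback instead of passing --custom-api-key.
export CUSTOM_API_KEY="custom-test-key"   # illustrative value from the tests

openclaw onboard --non-interactive \
  --auth-choice custom-api-key \
  --custom-base-url "https://llm.example.com/v1" \
  --custom-model-id "foo-large" \
  --custom-compatibility openai

# Per the tests in this patch, when --custom-provider-id is omitted the provider ID
# is derived from the host ("custom-llm-example-com") and the primary model becomes
# "custom-llm-example-com/foo-large".
```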