onboard: support custom provider in non-interactive flow (#14223)

Merged via /review-pr -> /prepare-pr -> /merge-pr.

Prepared head SHA: 5b98d6514e
Co-authored-by: ENCHIGO <38551565+ENCHIGO@users.noreply.github.com>
Co-authored-by: gumadeiras <5599352+gumadeiras@users.noreply.github.com>
Reviewed-by: @gumadeiras
Author: ENCHIGO
Date: 2026-02-12 03:48:45 +08:00 (committed by GitHub)
Commit: 029b77c85b (parent: c8d9733e41)
13 changed files with 791 additions and 90 deletions


@@ -303,7 +303,7 @@ Options:
- `--non-interactive`
- `--mode <local|remote>`
- `--flow <quickstart|advanced|manual>` (manual is an alias for advanced)
- `--auth-choice <setup-token|token|chutes|openai-codex|openai-api-key|openrouter-api-key|ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip>`
- `--auth-choice <setup-token|token|chutes|openai-codex|openai-api-key|openrouter-api-key|ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|custom-api-key|skip>`
- `--token-provider <id>` (non-interactive; used with `--auth-choice token`)
- `--token <token>` (non-interactive; used with `--auth-choice token`)
- `--token-profile-id <id>` (non-interactive; default: `<provider>:manual`)
@@ -318,6 +318,11 @@ Options:
- `--zai-api-key <key>`
- `--minimax-api-key <key>`
- `--opencode-zen-api-key <key>`
- `--custom-base-url <url>` (non-interactive; used with `--auth-choice custom-api-key`)
- `--custom-model-id <id>` (non-interactive; used with `--auth-choice custom-api-key`)
- `--custom-api-key <key>` (non-interactive; optional; used with `--auth-choice custom-api-key`; falls back to `CUSTOM_API_KEY` when omitted)
- `--custom-provider-id <id>` (non-interactive; optional custom provider id)
- `--custom-compatibility <openai|anthropic>` (non-interactive; optional; default `openai`)
- `--gateway-port <port>`
- `--gateway-bind <loopback|lan|tailnet|auto|custom>`
- `--gateway-auth <token|password>`


@@ -26,6 +26,19 @@ openclaw onboard --flow manual
openclaw onboard --mode remote --remote-url ws://gateway-host:18789
```
Non-interactive custom provider:
```bash
openclaw onboard --non-interactive \
--auth-choice custom-api-key \
--custom-base-url "https://llm.example.com/v1" \
--custom-model-id "foo-large" \
--custom-api-key "$CUSTOM_API_KEY" \
--custom-compatibility openai
```
`--custom-api-key` is optional in non-interactive mode. If omitted, onboarding falls back to the `CUSTOM_API_KEY` environment variable.
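For example, a run that relies on the environment fallback (placeholder values):
```bash
# CUSTOM_API_KEY is read when --custom-api-key is not passed.
export CUSTOM_API_KEY="sk-example"
openclaw onboard --non-interactive \
  --auth-choice custom-api-key \
  --custom-base-url "https://llm.example.com/v1" \
  --custom-model-id "foo-large"
```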
Flow notes:
- `quickstart`: minimal prompts, auto-generates a gateway token.


@@ -106,6 +106,23 @@ Add `--json` for a machine-readable summary.
--gateway-bind loopback
```
</Accordion>
<Accordion title="Custom provider example">
```bash
openclaw onboard --non-interactive \
--mode local \
--auth-choice custom-api-key \
--custom-base-url "https://llm.example.com/v1" \
--custom-model-id "foo-large" \
--custom-api-key "$CUSTOM_API_KEY" \
--custom-provider-id "my-custom" \
--custom-compatibility anthropic \
--gateway-port 18789 \
--gateway-bind loopback
```
`--custom-api-key` is optional. If omitted, onboarding falls back to the `CUSTOM_API_KEY` environment variable.
</Accordion>
</AccordionGroup>
## Add another agent


@@ -175,6 +175,18 @@ What you set:
Moonshot (Kimi K2) and Kimi Coding configs are auto-written.
More detail: [Moonshot AI (Kimi + Kimi Coding)](/providers/moonshot).
</Accordion>
<Accordion title="Custom provider">
Works with OpenAI-compatible and Anthropic-compatible endpoints.
Non-interactive flags:
- `--auth-choice custom-api-key`
- `--custom-base-url`
- `--custom-model-id`
- `--custom-api-key` (optional; falls back to `CUSTOM_API_KEY`)
- `--custom-provider-id` (optional)
- `--custom-compatibility <openai|anthropic>` (optional; default `openai`)
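For illustration, roughly what gets written to `openclaw.json` for a base URL of `https://llm.example.com/v1`, model `foo-large`, and Anthropic compatibility (abridged; the provider ID is derived from the base URL unless `--custom-provider-id` overrides it):
```json
{
  "models": {
    "mode": "merge",
    "providers": {
      "custom-llm-example-com": {
        "baseUrl": "https://llm.example.com/v1",
        "api": "anthropic-messages",
        "apiKey": "…",
        "models": [{ "id": "foo-large" }]
      }
    }
  },
  "agents": { "defaults": { "model": { "primary": "custom-llm-example-com/foo-large" } } }
}
```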
</Accordion>
<Accordion title="Skip">
Leaves auth unconfigured.
</Accordion>


@@ -228,6 +228,42 @@ describe("cli program (smoke)", () => {
}
});
it("passes custom provider flags to onboard", async () => {
const program = buildProgram();
await program.parseAsync(
[
"onboard",
"--non-interactive",
"--auth-choice",
"custom-api-key",
"--custom-base-url",
"https://llm.example.com/v1",
"--custom-api-key",
"sk-custom-test",
"--custom-model-id",
"foo-large",
"--custom-provider-id",
"my-custom",
"--custom-compatibility",
"anthropic",
],
{ from: "user" },
);
expect(onboardCommand).toHaveBeenCalledWith(
expect.objectContaining({
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://llm.example.com/v1",
customApiKey: "sk-custom-test",
customModelId: "foo-large",
customProviderId: "my-custom",
customCompatibility: "anthropic",
}),
runtime,
);
});
it("runs channels login", async () => {
const program = buildProgram();
await program.parseAsync(["channels", "login", "--account", "work"], {


@@ -58,7 +58,7 @@ export function registerOnboardCommand(program: Command) {
.option("--mode <mode>", "Wizard mode: local|remote")
.option(
"--auth-choice <choice>",
"Auth: setup-token|token|chutes|openai-codex|openai-api-key|xai-api-key|qianfan-api-key|openrouter-api-key|litellm-api-key|ai-gateway-api-key|cloudflare-ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip|together-api-key",
"Auth: setup-token|token|chutes|openai-codex|openai-api-key|xai-api-key|qianfan-api-key|openrouter-api-key|litellm-api-key|ai-gateway-api-key|cloudflare-ai-gateway-api-key|moonshot-api-key|moonshot-api-key-cn|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|custom-api-key|skip|together-api-key",
)
.option(
"--token-provider <id>",
@@ -90,6 +90,14 @@ export function registerOnboardCommand(program: Command) {
.option("--xai-api-key <key>", "xAI API key")
.option("--litellm-api-key <key>", "LiteLLM API key")
.option("--qianfan-api-key <key>", "QIANFAN API key")
.option("--custom-base-url <url>", "Custom provider base URL")
.option("--custom-api-key <key>", "Custom provider API key (optional)")
.option("--custom-model-id <id>", "Custom provider model ID")
.option("--custom-provider-id <id>", "Custom provider ID (optional; auto-derived by default)")
.option(
"--custom-compatibility <mode>",
"Custom provider API compatibility: openai|anthropic (default: openai)",
)
.option("--gateway-port <port>", "Gateway port")
.option("--gateway-bind <mode>", "Gateway bind: loopback|tailnet|lan|auto|custom")
.option("--gateway-auth <mode>", "Gateway auth: token|password")
@@ -148,6 +156,11 @@ export function registerOnboardCommand(program: Command) {
opencodeZenApiKey: opts.opencodeZenApiKey as string | undefined,
xaiApiKey: opts.xaiApiKey as string | undefined,
litellmApiKey: opts.litellmApiKey as string | undefined,
customBaseUrl: opts.customBaseUrl as string | undefined,
customApiKey: opts.customApiKey as string | undefined,
customModelId: opts.customModelId as string | undefined,
customProviderId: opts.customProviderId as string | undefined,
customCompatibility: opts.customCompatibility as "openai" | "anthropic" | undefined,
gatewayPort:
typeof gatewayPort === "number" && Number.isFinite(gatewayPort)
? gatewayPort


@@ -1,6 +1,10 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { defaultRuntime } from "../runtime.js";
import { promptCustomApiConfig } from "./onboard-custom.js";
import {
applyCustomApiConfig,
parseNonInteractiveCustomApiFlags,
promptCustomApiConfig,
} from "./onboard-custom.js";
// Mock dependencies
vi.mock("./model-picker.js", () => ({
@@ -268,3 +272,75 @@ describe("promptCustomApiConfig", () => {
expect(prompter.text).toHaveBeenCalledTimes(6);
});
});
describe("applyCustomApiConfig", () => {
it("rejects invalid compatibility values at runtime", () => {
expect(() =>
applyCustomApiConfig({
config: {},
baseUrl: "https://llm.example.com/v1",
modelId: "foo-large",
compatibility: "invalid" as unknown as "openai",
}),
).toThrow('Custom provider compatibility must be "openai" or "anthropic".');
});
it("rejects explicit provider ids that normalize to empty", () => {
expect(() =>
applyCustomApiConfig({
config: {},
baseUrl: "https://llm.example.com/v1",
modelId: "foo-large",
compatibility: "openai",
providerId: "!!!",
}),
).toThrow("Custom provider ID must include letters, numbers, or hyphens.");
});
});
describe("parseNonInteractiveCustomApiFlags", () => {
it("parses required flags and defaults compatibility to openai", () => {
const result = parseNonInteractiveCustomApiFlags({
baseUrl: " https://llm.example.com/v1 ",
modelId: " foo-large ",
apiKey: " custom-test-key ",
providerId: " my-custom ",
});
expect(result).toEqual({
baseUrl: "https://llm.example.com/v1",
modelId: "foo-large",
compatibility: "openai",
apiKey: "custom-test-key",
providerId: "my-custom",
});
});
it("rejects missing required flags", () => {
expect(() =>
parseNonInteractiveCustomApiFlags({
baseUrl: "https://llm.example.com/v1",
}),
).toThrow('Auth choice "custom-api-key" requires a base URL and model ID.');
});
it("rejects invalid compatibility values", () => {
expect(() =>
parseNonInteractiveCustomApiFlags({
baseUrl: "https://llm.example.com/v1",
modelId: "foo-large",
compatibility: "xmlrpc",
}),
).toThrow('Invalid --custom-compatibility (use "openai" or "anthropic").');
});
it("rejects invalid explicit provider ids", () => {
expect(() =>
parseNonInteractiveCustomApiFlags({
baseUrl: "https://llm.example.com/v1",
modelId: "foo-large",
providerId: "!!!",
}),
).toThrow("Custom provider ID must include letters, numbers, or hyphens.");
});
});


@@ -13,31 +13,84 @@ const DEFAULT_CONTEXT_WINDOW = 4096;
const DEFAULT_MAX_TOKENS = 4096;
const VERIFY_TIMEOUT_MS = 10000;
type CustomApiCompatibility = "openai" | "anthropic";
export type CustomApiCompatibility = "openai" | "anthropic";
type CustomApiCompatibilityChoice = CustomApiCompatibility | "unknown";
type CustomApiResult = {
export type CustomApiResult = {
config: OpenClawConfig;
providerId?: string;
modelId?: string;
providerIdRenamedFrom?: string;
};
export type ApplyCustomApiConfigParams = {
config: OpenClawConfig;
baseUrl: string;
modelId: string;
compatibility: CustomApiCompatibility;
apiKey?: string;
providerId?: string;
alias?: string;
};
export type ParseNonInteractiveCustomApiFlagsParams = {
baseUrl?: string;
modelId?: string;
compatibility?: string;
apiKey?: string;
providerId?: string;
};
export type ParsedNonInteractiveCustomApiFlags = {
baseUrl: string;
modelId: string;
compatibility: CustomApiCompatibility;
apiKey?: string;
providerId?: string;
};
export type CustomApiErrorCode =
| "missing_required"
| "invalid_compatibility"
| "invalid_base_url"
| "invalid_model_id"
| "invalid_provider_id"
| "invalid_alias";
export class CustomApiError extends Error {
readonly code: CustomApiErrorCode;
constructor(code: CustomApiErrorCode, message: string) {
super(message);
this.name = "CustomApiError";
this.code = code;
}
}
export type ResolveCustomProviderIdParams = {
config: OpenClawConfig;
baseUrl: string;
providerId?: string;
};
export type ResolvedCustomProviderId = {
providerId: string;
providerIdRenamedFrom?: string;
};
const COMPATIBILITY_OPTIONS: Array<{
value: CustomApiCompatibilityChoice;
label: string;
hint: string;
api?: "openai-completions" | "anthropic-messages";
}> = [
{
value: "openai",
label: "OpenAI-compatible",
hint: "Uses /chat/completions",
api: "openai-completions",
},
{
value: "anthropic",
label: "Anthropic-compatible",
hint: "Uses /messages",
api: "anthropic-messages",
},
{
value: "unknown",
@@ -246,6 +299,191 @@ async function promptBaseUrlAndKey(params: {
return { baseUrl: baseUrlInput.trim(), apiKey: apiKeyInput.trim() };
}
function resolveProviderApi(
compatibility: CustomApiCompatibility,
): "openai-completions" | "anthropic-messages" {
return compatibility === "anthropic" ? "anthropic-messages" : "openai-completions";
}
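// Normalizes --custom-compatibility; an omitted value defaults to "openai".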
function parseCustomApiCompatibility(raw?: string): CustomApiCompatibility {
const compatibilityRaw = raw?.trim().toLowerCase();
if (!compatibilityRaw) {
return "openai";
}
if (compatibilityRaw !== "openai" && compatibilityRaw !== "anthropic") {
throw new CustomApiError(
"invalid_compatibility",
'Invalid --custom-compatibility (use "openai" or "anthropic").',
);
}
return compatibilityRaw;
}
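// Derives a provider ID from the base URL when none is given, and renames on collision with an existing provider that uses a different base URL.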
export function resolveCustomProviderId(
params: ResolveCustomProviderIdParams,
): ResolvedCustomProviderId {
const providers = params.config.models?.providers ?? {};
const baseUrl = params.baseUrl.trim();
const explicitProviderId = params.providerId?.trim();
if (explicitProviderId && !normalizeEndpointId(explicitProviderId)) {
throw new CustomApiError(
"invalid_provider_id",
"Custom provider ID must include letters, numbers, or hyphens.",
);
}
const requestedProviderId = explicitProviderId || buildEndpointIdFromUrl(baseUrl);
const providerIdResult = resolveUniqueEndpointId({
requestedId: requestedProviderId,
baseUrl,
providers,
});
return {
providerId: providerIdResult.providerId,
...(providerIdResult.renamed
? {
providerIdRenamedFrom: normalizeEndpointId(requestedProviderId) || "custom",
}
: {}),
};
}
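// Validates and trims the raw --custom-* flag values for the non-interactive flow.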
export function parseNonInteractiveCustomApiFlags(
params: ParseNonInteractiveCustomApiFlagsParams,
): ParsedNonInteractiveCustomApiFlags {
const baseUrl = params.baseUrl?.trim() ?? "";
const modelId = params.modelId?.trim() ?? "";
if (!baseUrl || !modelId) {
throw new CustomApiError(
"missing_required",
[
'Auth choice "custom-api-key" requires a base URL and model ID.',
"Use --custom-base-url and --custom-model-id.",
].join("\n"),
);
}
const apiKey = params.apiKey?.trim();
const providerId = params.providerId?.trim();
if (providerId && !normalizeEndpointId(providerId)) {
throw new CustomApiError(
"invalid_provider_id",
"Custom provider ID must include letters, numbers, or hyphens.",
);
}
return {
baseUrl,
modelId,
compatibility: parseCustomApiCompatibility(params.compatibility),
...(apiKey ? { apiKey } : {}),
...(providerId ? { providerId } : {}),
};
}
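// Writes the provider entry, registers the model, and sets it as the primary model (with an optional alias).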
export function applyCustomApiConfig(params: ApplyCustomApiConfigParams): CustomApiResult {
const baseUrl = params.baseUrl.trim();
try {
new URL(baseUrl);
} catch {
throw new CustomApiError("invalid_base_url", "Custom provider base URL must be a valid URL.");
}
if (params.compatibility !== "openai" && params.compatibility !== "anthropic") {
throw new CustomApiError(
"invalid_compatibility",
'Custom provider compatibility must be "openai" or "anthropic".',
);
}
const modelId = params.modelId.trim();
if (!modelId) {
throw new CustomApiError("invalid_model_id", "Custom provider model ID is required.");
}
const providerIdResult = resolveCustomProviderId({
config: params.config,
baseUrl,
providerId: params.providerId,
});
const providerId = providerIdResult.providerId;
const providers = params.config.models?.providers ?? {};
const modelRef = modelKey(providerId, modelId);
const alias = params.alias?.trim() ?? "";
const aliasError = resolveAliasError({
raw: alias,
cfg: params.config,
modelRef,
});
if (aliasError) {
throw new CustomApiError("invalid_alias", aliasError);
}
const existingProvider = providers[providerId];
const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : [];
const hasModel = existingModels.some((model) => model.id === modelId);
const nextModel = {
id: modelId,
name: `${modelId} (Custom Provider)`,
contextWindow: DEFAULT_CONTEXT_WINDOW,
maxTokens: DEFAULT_MAX_TOKENS,
input: ["text"] as ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
reasoning: false,
};
const mergedModels = hasModel ? existingModels : [...existingModels, nextModel];
const { apiKey: existingApiKey, ...existingProviderRest } = existingProvider ?? {};
const normalizedApiKey =
params.apiKey?.trim() || (existingApiKey ? existingApiKey.trim() : undefined);
let config: OpenClawConfig = {
...params.config,
models: {
...params.config.models,
mode: params.config.models?.mode ?? "merge",
providers: {
...providers,
[providerId]: {
...existingProviderRest,
baseUrl,
api: resolveProviderApi(params.compatibility),
...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
models: mergedModels.length > 0 ? mergedModels : [nextModel],
},
},
},
};
config = applyPrimaryModel(config, modelRef);
if (alias) {
config = {
...config,
agents: {
...config.agents,
defaults: {
...config.agents?.defaults,
models: {
...config.agents?.defaults?.models,
[modelRef]: {
...config.agents?.defaults?.models?.[modelRef],
alias,
},
},
},
},
};
}
return {
config,
providerId,
modelId,
...(providerIdResult.providerIdRenamedFrom
? { providerIdRenamedFrom: providerIdResult.providerIdRenamedFrom }
: {}),
};
}
export async function promptCustomApiConfig(params: {
prompter: WizardPrompter;
runtime: RuntimeEnv;
@@ -276,9 +514,6 @@ export async function promptCustomApiConfig(params: {
let compatibility: CustomApiCompatibility | null =
compatibilityChoice === "unknown" ? null : compatibilityChoice;
let providerApi =
COMPATIBILITY_OPTIONS.find((entry) => entry.value === compatibility)?.api ??
"openai-completions";
while (true) {
let verifiedFromProbe = false;
@@ -288,14 +523,12 @@ export async function promptCustomApiConfig(params: {
if (openaiProbe.ok) {
probeSpinner.stop("Detected OpenAI-compatible endpoint.");
compatibility = "openai";
providerApi = "openai-completions";
verifiedFromProbe = true;
} else {
const anthropicProbe = await requestAnthropicVerification({ baseUrl, apiKey, modelId });
if (anthropicProbe.ok) {
probeSpinner.stop("Detected Anthropic-compatible endpoint.");
compatibility = "anthropic";
providerApi = "anthropic-messages";
verifiedFromProbe = true;
} else {
probeSpinner.stop("Could not detect endpoint type.");
@@ -395,82 +628,39 @@ export async function promptCustomApiConfig(params: {
return undefined;
},
});
const providerIdResult = resolveUniqueEndpointId({
requestedId: providerIdInput,
baseUrl,
providers,
});
if (providerIdResult.renamed) {
await prompter.note(
`Endpoint ID "${providerIdInput}" already exists for a different base URL. Using "${providerIdResult.providerId}".`,
"Endpoint ID",
);
}
const providerId = providerIdResult.providerId;
const modelRef = modelKey(providerId, modelId);
const aliasInput = await prompter.text({
message: "Model alias (optional)",
placeholder: "e.g. local, ollama",
initialValue: "",
validate: (value) => resolveAliasError({ raw: value, cfg: config, modelRef }),
});
const alias = aliasInput.trim();
const existingProvider = providers[providerId];
const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : [];
const hasModel = existingModels.some((model) => model.id === modelId);
const nextModel = {
id: modelId,
name: `${modelId} (Custom Provider)`,
contextWindow: DEFAULT_CONTEXT_WINDOW,
maxTokens: DEFAULT_MAX_TOKENS,
input: ["text"] as ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
reasoning: false,
};
const mergedModels = hasModel ? existingModels : [...existingModels, nextModel];
const { apiKey: existingApiKey, ...existingProviderRest } = existingProvider ?? {};
const normalizedApiKey = apiKey.trim() || (existingApiKey ? existingApiKey.trim() : undefined);
let newConfig: OpenClawConfig = {
...config,
models: {
...config.models,
mode: config.models?.mode ?? "merge",
providers: {
...providers,
[providerId]: {
...existingProviderRest,
baseUrl,
api: providerApi,
...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
models: mergedModels.length > 0 ? mergedModels : [nextModel],
},
},
validate: (value) => {
const requestedId = normalizeEndpointId(providerIdInput) || "custom";
const providerIdResult = resolveUniqueEndpointId({
requestedId,
baseUrl,
providers,
});
const modelRef = modelKey(providerIdResult.providerId, modelId);
return resolveAliasError({ raw: value, cfg: config, modelRef });
},
};
});
const resolvedCompatibility = compatibility ?? "openai";
const result = applyCustomApiConfig({
config,
baseUrl,
modelId,
compatibility: resolvedCompatibility,
apiKey,
providerId: providerIdInput,
alias: aliasInput,
});
newConfig = applyPrimaryModel(newConfig, modelRef);
if (alias) {
newConfig = {
...newConfig,
agents: {
...newConfig.agents,
defaults: {
...newConfig.agents?.defaults,
models: {
...newConfig.agents?.defaults?.models,
[modelRef]: {
...newConfig.agents?.defaults?.models?.[modelRef],
alias,
},
},
},
},
};
if (result.providerIdRenamedFrom && result.providerId) {
await prompter.note(
`Endpoint ID "${result.providerIdRenamedFrom}" already exists for a different base URL. Using "${result.providerId}".`,
"Endpoint ID",
);
}
runtime.log(`Configured custom provider: ${providerId}/${modelId}`);
return { config: newConfig, providerId, modelId };
runtime.log(`Configured custom provider: ${result.providerId}/${result.modelId}`);
return result;
}
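For orientation, a minimal sketch of how a non-interactive caller can compose the new exports (starting from an empty config; values are placeholders):
```typescript
import {
  applyCustomApiConfig,
  CustomApiError,
  parseNonInteractiveCustomApiFlags,
} from "./onboard-custom.js";

try {
  // Normalize and validate the raw --custom-* flag values.
  const flags = parseNonInteractiveCustomApiFlags({
    baseUrl: "https://llm.example.com/v1",
    modelId: "foo-large",
    compatibility: "anthropic",
    apiKey: process.env.CUSTOM_API_KEY,
  });
  // Write the provider entry and promote the model to primary.
  const { config, providerId, modelId } = applyCustomApiConfig({ config: {}, ...flags });
  console.log(`Configured ${providerId}/${modelId}`, Object.keys(config.models?.providers ?? {}));
} catch (err) {
  // CustomApiError carries a stable code, e.g. "missing_required" or "invalid_compatibility".
  if (err instanceof CustomApiError) console.error(err.code, err.message);
}
```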


@@ -20,6 +20,7 @@ type EnvSnapshot = {
skipCanvas: string | undefined;
token: string | undefined;
password: string | undefined;
customApiKey: string | undefined;
disableConfigCache: string | undefined;
};
@@ -39,6 +40,7 @@ function captureEnv(): EnvSnapshot {
skipCanvas: process.env.OPENCLAW_SKIP_CANVAS_HOST,
token: process.env.OPENCLAW_GATEWAY_TOKEN,
password: process.env.OPENCLAW_GATEWAY_PASSWORD,
customApiKey: process.env.CUSTOM_API_KEY,
disableConfigCache: process.env.OPENCLAW_DISABLE_CONFIG_CACHE,
};
}
@@ -61,6 +63,7 @@ function restoreEnv(prev: EnvSnapshot): void {
restoreEnvVar("OPENCLAW_SKIP_CANVAS_HOST", prev.skipCanvas);
restoreEnvVar("OPENCLAW_GATEWAY_TOKEN", prev.token);
restoreEnvVar("OPENCLAW_GATEWAY_PASSWORD", prev.password);
restoreEnvVar("CUSTOM_API_KEY", prev.customApiKey);
restoreEnvVar("OPENCLAW_DISABLE_CONFIG_CACHE", prev.disableConfigCache);
}
@@ -77,6 +80,7 @@ async function withOnboardEnv(
process.env.OPENCLAW_DISABLE_CONFIG_CACHE = "1";
delete process.env.OPENCLAW_GATEWAY_TOKEN;
delete process.env.OPENCLAW_GATEWAY_PASSWORD;
delete process.env.CUSTOM_API_KEY;
const tempHome = await fs.mkdtemp(path.join(os.tmpdir(), prefix));
const configPath = path.join(tempHome, "openclaw.json");
@@ -324,4 +328,240 @@ describe("onboard (non-interactive): provider auth", () => {
});
});
}, 60_000);
it("configures a custom provider from non-interactive flags", async () => {
await withOnboardEnv("openclaw-onboard-custom-provider-", async ({ configPath, runtime }) => {
await runNonInteractive(
{
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://llm.example.com/v1",
customApiKey: "custom-test-key",
customModelId: "foo-large",
customCompatibility: "anthropic",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
);
const cfg = await readJsonFile<{
models?: {
providers?: Record<
string,
{
baseUrl?: string;
api?: string;
apiKey?: string;
models?: Array<{ id?: string }>;
}
>;
};
agents?: { defaults?: { model?: { primary?: string } } };
}>(configPath);
const provider = cfg.models?.providers?.["custom-llm-example-com"];
expect(provider?.baseUrl).toBe("https://llm.example.com/v1");
expect(provider?.api).toBe("anthropic-messages");
expect(provider?.apiKey).toBe("custom-test-key");
expect(provider?.models?.some((model) => model.id === "foo-large")).toBe(true);
expect(cfg.agents?.defaults?.model?.primary).toBe("custom-llm-example-com/foo-large");
});
}, 60_000);
it("infers custom provider auth choice from custom flags", async () => {
await withOnboardEnv(
"openclaw-onboard-custom-provider-infer-",
async ({ configPath, runtime }) => {
await runNonInteractive(
{
nonInteractive: true,
customBaseUrl: "https://models.custom.local/v1",
customModelId: "local-large",
customApiKey: "custom-test-key",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
);
const cfg = await readJsonFile<{
models?: {
providers?: Record<
string,
{
baseUrl?: string;
api?: string;
}
>;
};
agents?: { defaults?: { model?: { primary?: string } } };
}>(configPath);
expect(cfg.models?.providers?.["custom-models-custom-local"]?.baseUrl).toBe(
"https://models.custom.local/v1",
);
expect(cfg.models?.providers?.["custom-models-custom-local"]?.api).toBe(
"openai-completions",
);
expect(cfg.agents?.defaults?.model?.primary).toBe("custom-models-custom-local/local-large");
},
);
}, 60_000);
it("uses CUSTOM_API_KEY env fallback for non-interactive custom provider auth", async () => {
await withOnboardEnv(
"openclaw-onboard-custom-provider-env-fallback-",
async ({ configPath, runtime }) => {
process.env.CUSTOM_API_KEY = "custom-env-key";
await runNonInteractive(
{
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://models.custom.local/v1",
customModelId: "local-large",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
);
const cfg = await readJsonFile<{
models?: {
providers?: Record<
string,
{
apiKey?: string;
}
>;
};
}>(configPath);
expect(cfg.models?.providers?.["custom-models-custom-local"]?.apiKey).toBe(
"custom-env-key",
);
},
);
}, 60_000);
it("uses matching profile fallback for non-interactive custom provider auth", async () => {
await withOnboardEnv(
"openclaw-onboard-custom-provider-profile-fallback-",
async ({ configPath, runtime }) => {
const { upsertAuthProfile } = await import("../agents/auth-profiles.js");
upsertAuthProfile({
profileId: "custom-models-custom-local:default",
credential: {
type: "api_key",
provider: "custom-models-custom-local",
key: "custom-profile-key",
},
});
await runNonInteractive(
{
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://models.custom.local/v1",
customModelId: "local-large",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
);
const cfg = await readJsonFile<{
models?: {
providers?: Record<
string,
{
apiKey?: string;
}
>;
};
}>(configPath);
expect(cfg.models?.providers?.["custom-models-custom-local"]?.apiKey).toBe(
"custom-profile-key",
);
},
);
}, 60_000);
it("fails custom provider auth when compatibility is invalid", async () => {
await withOnboardEnv(
"openclaw-onboard-custom-provider-invalid-compat-",
async ({ runtime }) => {
await expect(
runNonInteractive(
{
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://models.custom.local/v1",
customModelId: "local-large",
customCompatibility: "xmlrpc",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
),
).rejects.toThrow('Invalid --custom-compatibility (use "openai" or "anthropic").');
},
);
}, 60_000);
it("fails custom provider auth when explicit provider id is invalid", async () => {
await withOnboardEnv("openclaw-onboard-custom-provider-invalid-id-", async ({ runtime }) => {
await expect(
runNonInteractive(
{
nonInteractive: true,
authChoice: "custom-api-key",
customBaseUrl: "https://models.custom.local/v1",
customModelId: "local-large",
customProviderId: "!!!",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
),
).rejects.toThrow(
"Invalid custom provider config: Custom provider ID must include letters, numbers, or hyphens.",
);
});
}, 60_000);
it("fails inferred custom auth when required flags are incomplete", async () => {
await withOnboardEnv(
"openclaw-onboard-custom-provider-missing-required-",
async ({ runtime }) => {
await expect(
runNonInteractive(
{
nonInteractive: true,
customApiKey: "custom-test-key",
skipHealth: true,
skipChannels: true,
skipSkills: true,
json: true,
},
runtime,
),
).rejects.toThrow('Auth choice "custom-api-key" requires a base URL and model ID.');
},
);
}, 60_000);
});


@@ -45,9 +45,11 @@ export async function resolveNonInteractiveApiKey(params: {
flagValue?: string;
flagName: string;
envVar: string;
envVarName?: string;
runtime: RuntimeEnv;
agentDir?: string;
allowProfile?: boolean;
required?: boolean;
}): Promise<{ key: string; source: NonInteractiveApiKeySource } | null> {
const flagKey = normalizeOptionalSecretInput(params.flagValue);
if (flagKey) {
@@ -59,6 +61,14 @@ export async function resolveNonInteractiveApiKey(params: {
return { key: envResolved.apiKey, source: "env" };
}
const explicitEnvVar = params.envVarName?.trim();
if (explicitEnvVar) {
const explicitEnvKey = normalizeOptionalSecretInput(process.env[explicitEnvVar]);
if (explicitEnvKey) {
return { key: explicitEnvKey, source: "env" };
}
}
if (params.allowProfile ?? true) {
const profileKey = await resolveApiKeyFromProfiles({
provider: params.provider,
@@ -70,6 +80,10 @@ export async function resolveNonInteractiveApiKey(params: {
}
}
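// When the key is optional (required: false), a miss returns null instead of erroring.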
if (params.required === false) {
return null;
}
const profileHint =
params.allowProfile === false ? "" : `, or existing ${params.provider} API-key profile`;
params.runtime.error(`Missing ${params.flagName} (or ${params.envVar} in env${profileHint}).`);


@@ -24,6 +24,9 @@ type AuthChoiceFlagOptions = Pick<
| "opencodeZenApiKey"
| "xaiApiKey"
| "litellmApiKey"
| "customBaseUrl"
| "customModelId"
| "customApiKey"
>;
const AUTH_CHOICE_FLAG_MAP = [
@@ -54,15 +57,27 @@ export type AuthChoiceInference = {
matches: AuthChoiceFlag[];
};
function hasStringValue(value: unknown): boolean {
return typeof value === "string" ? value.trim().length > 0 : Boolean(value);
}
// Infer auth choice from explicit provider API key flags.
export function inferAuthChoiceFromFlags(opts: OnboardOptions): AuthChoiceInference {
const matches = AUTH_CHOICE_FLAG_MAP.filter(({ flag }) => {
const value = opts[flag];
if (typeof value === "string") {
return value.trim().length > 0;
}
return Boolean(value);
});
const matches: AuthChoiceFlag[] = AUTH_CHOICE_FLAG_MAP.filter(({ flag }) =>
hasStringValue(opts[flag]),
);
if (
hasStringValue(opts.customBaseUrl) ||
hasStringValue(opts.customModelId) ||
hasStringValue(opts.customApiKey)
) {
matches.push({
flag: "customBaseUrl",
authChoice: "custom-api-key",
label: "--custom-base-url/--custom-model-id/--custom-api-key",
});
}
return {
choice: matches[0]?.authChoice,

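A sketch of the inference with only the custom flags set (assuming the remaining `OnboardOptions` fields are optional):
```typescript
const inference = inferAuthChoiceFromFlags({
  customBaseUrl: "https://llm.example.com/v1",
  customModelId: "foo-large",
});
// inference.choice === "custom-api-key"
// inference.matches includes the synthetic "customBaseUrl" entry pushed above.
```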

@@ -46,6 +46,12 @@ import {
setXiaomiApiKey,
setZaiApiKey,
} from "../../onboard-auth.js";
import {
applyCustomApiConfig,
CustomApiError,
parseNonInteractiveCustomApiFlags,
resolveCustomProviderId,
} from "../../onboard-custom.js";
import { applyOpenAIConfig } from "../../openai-model-default.js";
import { resolveNonInteractiveApiKey } from "../api-keys.js";
@@ -594,6 +600,65 @@ export async function applyNonInteractiveAuthChoice(params: {
return applyTogetherConfig(nextConfig);
}
if (authChoice === "custom-api-key") {
try {
const customAuth = parseNonInteractiveCustomApiFlags({
baseUrl: opts.customBaseUrl,
modelId: opts.customModelId,
compatibility: opts.customCompatibility,
apiKey: opts.customApiKey,
providerId: opts.customProviderId,
});
const resolvedProviderId = resolveCustomProviderId({
config: nextConfig,
baseUrl: customAuth.baseUrl,
providerId: customAuth.providerId,
});
const resolvedCustomApiKey = await resolveNonInteractiveApiKey({
provider: resolvedProviderId.providerId,
cfg: baseConfig,
flagValue: customAuth.apiKey,
flagName: "--custom-api-key",
envVar: "CUSTOM_API_KEY",
envVarName: "CUSTOM_API_KEY",
runtime,
required: false,
});
const result = applyCustomApiConfig({
config: nextConfig,
baseUrl: customAuth.baseUrl,
modelId: customAuth.modelId,
compatibility: customAuth.compatibility,
apiKey: resolvedCustomApiKey?.key,
providerId: customAuth.providerId,
});
if (result.providerIdRenamedFrom && result.providerId) {
runtime.log(
`Custom provider ID "${result.providerIdRenamedFrom}" already exists for a different base URL. Using "${result.providerId}".`,
);
}
return result.config;
} catch (err) {
if (err instanceof CustomApiError) {
switch (err.code) {
case "missing_required":
case "invalid_compatibility":
runtime.error(err.message);
break;
default:
runtime.error(`Invalid custom provider config: ${err.message}`);
break;
}
runtime.exit(1);
return null;
}
const reason = err instanceof Error ? err.message : String(err);
runtime.error(`Invalid custom provider config: ${reason}`);
runtime.exit(1);
return null;
}
}
if (
authChoice === "oauth" ||
authChoice === "chutes" ||

View File

@@ -107,6 +107,11 @@ export type OnboardOptions = {
opencodeZenApiKey?: string;
xaiApiKey?: string;
qianfanApiKey?: string;
customBaseUrl?: string;
customApiKey?: string;
customModelId?: string;
customProviderId?: string;
customCompatibility?: "openai" | "anthropic";
gatewayPort?: number;
gatewayBind?: GatewayBind;
gatewayAuth?: GatewayAuthChoice;