Mirror of https://github.com/openclaw/openclaw.git (synced 2026-02-19 18:39:20 -05:00)
fix: align opencode-zen provider setup
docs/concepts/model-providers.md — 162 lines added (new file)
@@ -0,0 +1,162 @@
---
summary: "Model provider overview with example configs + CLI flows"
read_when:
  - You need a provider-by-provider model setup reference
  - You want example configs or CLI onboarding commands for model providers
---

# Model providers

This page covers **LLM/model providers** (not chat providers like WhatsApp/Telegram).
For model selection rules, see [/concepts/models](/concepts/models).

## Quick rules

- Model refs use `provider/model` (example: `opencode/claude-opus-4-5`).
- If you set `agents.defaults.models`, it becomes the allowlist (see the sketch below).
- CLI helpers: `clawdbot onboard`, `clawdbot models list`, `clawdbot models set <provider/model>`.
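
For example, pinning a primary model and opting into an allowlist might look like this (a minimal sketch; the exact model refs are illustrative):

```json5
{
  agents: {
    defaults: {
      model: { primary: "anthropic/claude-opus-4-5" },
      // Once `models` is set, only the refs listed here are selectable.
      models: {
        "anthropic/claude-opus-4-5": { alias: "Opus" },
        "openai/gpt-5.2": {}
      }
    }
  }
}
```
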
## Built-in providers (pi-ai catalog)

Clawdbot ships with the pi-ai catalog. These providers require **no** `models.providers` config; just set auth + pick a model.

### OpenAI

- Provider: `openai`
- Auth: `OPENAI_API_KEY`
- Example model: `openai/gpt-5.2`
- CLI: `clawdbot onboard --auth-choice openai-api-key`

```json5
{
  agents: { defaults: { model: { primary: "openai/gpt-5.2" } } }
}
```

### Anthropic

- Provider: `anthropic`
- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
- Example model: `anthropic/claude-opus-4-5`
- CLI: `clawdbot onboard --auth-choice setup-token`

```json5
{
  agents: { defaults: { model: { primary: "anthropic/claude-opus-4-5" } } }
}
```

### OpenAI Code (Codex)

- Provider: `openai-codex`
- Auth: OAuth or Codex CLI (`~/.codex/auth.json`)
- Example model: `openai-codex/gpt-5.2`
- CLI: `clawdbot onboard --auth-choice openai-codex` or `codex-cli`

```json5
{
  agents: { defaults: { model: { primary: "openai-codex/gpt-5.2" } } }
}
```

### OpenCode Zen

- Provider: `opencode`
- Auth: `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`)
- Example model: `opencode/claude-opus-4-5`
- CLI: `clawdbot onboard --auth-choice opencode-zen`

```json5
{
  agents: { defaults: { model: { primary: "opencode/claude-opus-4-5" } } }
}
```

### Google Gemini (API key)

- Provider: `google`
- Auth: `GEMINI_API_KEY`
- Example model: `google/gemini-3-pro`
- CLI: `clawdbot onboard --auth-choice gemini-api-key`
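
Same pattern as the providers above (a minimal sketch using the example model ref):

```json5
{
  agents: { defaults: { model: { primary: "google/gemini-3-pro" } } }
}
```
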
### Google Vertex / Antigravity / Gemini CLI

- Providers: `google-vertex`, `google-antigravity`, `google-gemini-cli`
- Auth: Vertex uses gcloud ADC; Antigravity and Gemini CLI use their respective auth flows
- CLI: `clawdbot onboard --auth-choice antigravity` (others via the interactive wizard)

### Z.AI (GLM)

- Provider: `zai`
- Auth: `ZAI_API_KEY`
- Example model: `zai/glm-4.7`
- CLI: `clawdbot onboard --auth-choice zai-api-key`
- Aliases: `z.ai/*` and `z-ai/*` normalize to `zai/*`
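
A minimal selection sketch, following the same pattern (assumes `ZAI_API_KEY` is already set):

```json5
{
  agents: { defaults: { model: { primary: "zai/glm-4.7" } } }
}
```
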
### Other built-in providers

- OpenRouter: `openrouter` (`OPENROUTER_API_KEY`)
  - Example model: `openrouter/anthropic/claude-sonnet-4-5` (used in the sketch below)
- xAI: `xai` (`XAI_API_KEY`)
- Groq: `groq` (`GROQ_API_KEY`)
- Cerebras: `cerebras` (`CEREBRAS_API_KEY`)
- Mistral: `mistral` (`MISTRAL_API_KEY`)
- GitHub Copilot: `github-copilot` (`COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN`)
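
For example, selecting the OpenRouter model above (a sketch; assumes `OPENROUTER_API_KEY` is set):

```json5
{
  agents: { defaults: { model: { primary: "openrouter/anthropic/claude-sonnet-4-5" } } }
}
```
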
## Providers via `models.providers` (custom/base URL)

Use `models.providers` (or `models.json`) to add **custom** providers or OpenAI/Anthropic-compatible proxies.

### MiniMax

MiniMax is configured via `models.providers` because it uses custom endpoints (see the sketch after this list):

- MiniMax Cloud (OpenAI-compatible): `--auth-choice minimax-cloud`
- MiniMax API (Anthropic-compatible): `--auth-choice minimax-api`
- Auth: `MINIMAX_API_KEY`
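
A rough, hypothetical `models.providers` sketch for the Anthropic-compatible MiniMax API; the base URL, model id, and limits below are placeholders rather than verified values, so prefer letting `clawdbot onboard --auth-choice minimax-api` write the real config:

```json5
{
  agents: { defaults: { model: { primary: "minimax/minimax-m2.1" } } }, // placeholder model ref
  models: {
    providers: {
      minimax: {
        baseUrl: "https://<minimax-anthropic-endpoint>", // placeholder, not a real URL
        apiKey: "${MINIMAX_API_KEY}",
        api: "anthropic-messages",
        models: [
          {
            id: "minimax-m2.1", // placeholder id
            name: "MiniMax M2.1",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 200000, // placeholder limits
            maxTokens: 8192
          }
        ]
      }
    }
  }
}
```
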
### Local proxies (LM Studio, vLLM, LiteLLM, etc.)

Example (OpenAI-compatible):

```json5
{
  agents: {
    defaults: {
      model: { primary: "lmstudio/minimax-m2.1-gs32" },
      models: { "lmstudio/minimax-m2.1-gs32": { alias: "Minimax" } }
    }
  },
  models: {
    providers: {
      lmstudio: {
        baseUrl: "http://localhost:1234/v1",
        apiKey: "LMSTUDIO_KEY",
        api: "openai-completions",
        models: [
          {
            id: "minimax-m2.1-gs32",
            name: "MiniMax M2.1",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 200000,
            maxTokens: 8192
          }
        ]
      }
    }
  }
}
```

## CLI examples

```bash
clawdbot onboard --auth-choice opencode-zen
clawdbot models set opencode/claude-opus-4-5
clawdbot models list
```

See also: [/gateway/configuration](/gateway/configuration) for full configuration examples.

@@ -9,6 +9,7 @@ read_when:

See [/concepts/model-failover](/concepts/model-failover) for auth profile
rotation, cooldowns, and how that interacts with fallbacks.
Quick provider overview + examples: [/concepts/model-providers](/concepts/model-providers).

## How model selection works

@@ -580,6 +580,7 @@
"group": "Install & Updates",
"pages": [
"install/updating",
"install/ansible",
"install/nix",
"install/docker",
"install/bun"
@@ -589,7 +590,9 @@
"group": "CLI",
"pages": [
"cli/index",
"cli/message",
"cli/gateway",
"cli/update",
"cli/sandbox"
]
},
@@ -612,12 +615,16 @@
"concepts/presence",
"concepts/provider-routing",
"concepts/messages",
"concepts/streaming",
"concepts/groups",
"concepts/group-messages",
"concepts/typing-indicators",
"concepts/queue",
"concepts/retry",
"concepts/model-providers",
"concepts/models",
"concepts/model-failover",
"concepts/usage-tracking",
"concepts/timezone",
"concepts/typebox"
]
@@ -628,6 +635,7 @@
"gateway",
"gateway/pairing",
"gateway/gateway-lock",
"environment",
"gateway/configuration",
"gateway/configuration-examples",
"gateway/authentication",
@@ -637,7 +645,10 @@
"gateway/doctor",
"gateway/logging",
"gateway/security",
"gateway/sandbox-vs-tool-policy-vs-elevated",
"gateway/sandboxing",
"gateway/troubleshooting",
"debugging",
"gateway/remote",
"gateway/remote-gateway-readme",
"gateway/discovery",
@@ -659,12 +670,15 @@
"group": "Providers",
"pages": [
"providers/whatsapp",
"broadcast-groups",
"providers/telegram",
"providers/grammy",
"providers/discord",
"providers/slack",
"providers/signal",
"providers/imessage",
"providers/msteams",
"providers/troubleshooting",
"providers/location"
]
},
@@ -690,6 +704,8 @@
"tools/thinking",
"tools/agent-send",
"tools/subagents",
"multi-agent-sandbox-tools",
"tools/reactions",
"tools/skills",
"tools/skills-config",
"tools/clawdhub"

@@ -1423,6 +1423,7 @@ Clawdbot uses the **pi-coding-agent** model catalog. You can add custom provider
(LiteLLM, local OpenAI-compatible servers, Anthropic proxies, etc.) by writing
`~/.clawdbot/agents/<agentId>/agent/models.json` or by defining the same schema inside your
Clawdbot config under `models.providers`.
Provider-by-provider overview + examples: [/concepts/model-providers](/concepts/model-providers).

When `models.providers` is present, Clawdbot writes/merges a `models.json` into
`~/.clawdbot/agents/<agentId>/agent/` on startup:
@@ -1467,10 +1468,12 @@ Select the model via `agents.defaults.model.primary` (provider/model).

### OpenCode Zen (multi-model proxy)

OpenCode Zen is an OpenAI-compatible proxy at `https://opencode.ai/zen/v1`. Get an API key at https://opencode.ai/auth and set `OPENCODE_ZEN_API_KEY`.
OpenCode Zen is a multi-model gateway with per-model endpoints. Clawdbot uses
the built-in `opencode` provider from pi-ai; set `OPENCODE_API_KEY` (or
`OPENCODE_ZEN_API_KEY`) from https://opencode.ai/auth.

Notes:
- Model refs use `opencode-zen/<modelId>` (example: `opencode-zen/claude-opus-4-5`).
- Model refs use `opencode/<modelId>` (example: `opencode/claude-opus-4-5`).
- If you enable an allowlist via `agents.defaults.models`, add each model you plan to use.
- Shortcut: `clawdbot onboard --auth-choice opencode-zen`.

@@ -1478,29 +1481,8 @@ Notes:
{
  agents: {
    defaults: {
      model: { primary: "opencode-zen/claude-opus-4-5" },
      models: { "opencode-zen/claude-opus-4-5": { alias: "Opus" } }
    }
  },
  models: {
    mode: "merge",
    providers: {
      "opencode-zen": {
        baseUrl: "https://opencode.ai/zen/v1",
        apiKey: "${OPENCODE_ZEN_API_KEY}",
        api: "openai-completions",
        models: [
          {
            id: "claude-opus-4-5",
            name: "Claude Opus 4.5",
            reasoning: true,
            input: ["text", "image"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 200000,
            maxTokens: 32000
          }
        ]
      }
      model: { primary: "opencode/claude-opus-4-5" },
      models: { "opencode/claude-opus-4-5": { alias: "Opus" } }
    }
  }
}

@@ -78,7 +78,7 @@ Tip: `--json` does **not** imply non-interactive mode. Use `--non-interactive` (
- **OpenAI Code (Codex) subscription (OAuth)**: browser flow; paste the `code#state`.
- Sets `agents.defaults.model` to `openai-codex/gpt-5.2` when model is unset or `openai/*`.
- **OpenAI API key**: uses `OPENAI_API_KEY` if present or prompts for a key, then saves it to `~/.clawdbot/.env` so launchd can read it.
- **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_ZEN_API_KEY` (get it at https://opencode.ai/auth).
- **OpenCode Zen (multi-model proxy)**: prompts for `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`, get it at https://opencode.ai/auth).
- **API key**: stores the key for you.
- **MiniMax M2.1 (minimax.io)**: config is auto-written for the OpenAI-compatible `/v1` endpoint.
- **MiniMax API (platform.minimax.io)**: config is auto-written for the Anthropic-compatible `/anthropic` endpoint.
@@ -205,7 +205,7 @@ OpenCode Zen example:
clawdbot onboard --non-interactive \
  --mode local \
  --auth-choice opencode-zen \
  --opencode-zen-api-key "$OPENCODE_ZEN_API_KEY" \
  --opencode-zen-api-key "$OPENCODE_API_KEY" \
  --gateway-port 18789 \
  --gateway-bind loopback
```

@@ -2,6 +2,7 @@ import { type Api, getEnvApiKey, type Model } from "@mariozechner/pi-ai";
import type { ClawdbotConfig } from "../config/config.js";
import type { ModelProviderConfig } from "../config/types.js";
import { getShellEnvAppliedKeys } from "../infra/shell-env.js";
import { normalizeProviderId } from "./model-selection.js";
import {
  type AuthProfileStore,
  ensureAuthProfileStore,
@@ -103,6 +104,7 @@ export type EnvApiKeyResult = { apiKey: string; source: string };
export type ModelAuthMode = "api-key" | "oauth" | "token" | "mixed" | "unknown";

export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
  const normalized = normalizeProviderId(provider);
  const applied = new Set(getShellEnvAppliedKeys());
  const pick = (envVar: string): EnvApiKeyResult | null => {
    const value = process.env[envVar]?.trim();
@@ -113,26 +115,30 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
    return { apiKey: value, source };
  };

  if (provider === "github-copilot") {
  if (normalized === "github-copilot") {
    return (
      pick("COPILOT_GITHUB_TOKEN") ?? pick("GH_TOKEN") ?? pick("GITHUB_TOKEN")
    );
  }

  if (provider === "anthropic") {
  if (normalized === "anthropic") {
    return pick("ANTHROPIC_OAUTH_TOKEN") ?? pick("ANTHROPIC_API_KEY");
  }

  if (provider === "zai") {
  if (normalized === "zai") {
    return pick("ZAI_API_KEY") ?? pick("Z_AI_API_KEY");
  }

  if (provider === "google-vertex") {
    const envKey = getEnvApiKey(provider);
  if (normalized === "google-vertex") {
    const envKey = getEnvApiKey(normalized);
    if (!envKey) return null;
    return { apiKey: envKey, source: "gcloud adc" };
  }

  if (normalized === "opencode") {
    return pick("OPENCODE_API_KEY") ?? pick("OPENCODE_ZEN_API_KEY");
  }

  const envMap: Record<string, string> = {
    openai: "OPENAI_API_KEY",
    google: "GEMINI_API_KEY",
@@ -142,9 +148,9 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
    openrouter: "OPENROUTER_API_KEY",
    minimax: "MINIMAX_API_KEY",
    mistral: "MISTRAL_API_KEY",
    "opencode-zen": "OPENCODE_ZEN_API_KEY",
    opencode: "OPENCODE_API_KEY",
  };
  const envVar = envMap[provider];
  const envVar = envMap[normalized];
  if (!envVar) return null;
  return pick(envVar);
}

@@ -24,6 +24,7 @@ export function modelKey(provider: string, model: string) {
export function normalizeProviderId(provider: string): string {
  const normalized = provider.trim().toLowerCase();
  if (normalized === "z.ai" || normalized === "z-ai") return "zai";
  if (normalized === "opencode-zen") return "opencode";
  return normalized;
}

@@ -41,12 +41,18 @@ describe("resolveOpencodeZenAlias", () => {
});

describe("resolveOpencodeZenModelApi", () => {
  it("returns openai-completions for all models (OpenCode Zen is OpenAI-compatible)", () => {
  it("maps APIs by model family", () => {
    expect(resolveOpencodeZenModelApi("claude-opus-4-5")).toBe(
      "openai-completions",
      "anthropic-messages",
    );
    expect(resolveOpencodeZenModelApi("minimax-m2.1-free")).toBe(
      "anthropic-messages",
    );
    expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-completions");
    expect(resolveOpencodeZenModelApi("gemini-3-pro")).toBe(
      "google-generative-ai",
    );
    expect(resolveOpencodeZenModelApi("gpt-5.2")).toBe("openai-responses");
    expect(resolveOpencodeZenModelApi("glm-4.7-free")).toBe(
      "openai-completions",
    );
    expect(resolveOpencodeZenModelApi("some-unknown-model")).toBe(

@@ -12,7 +12,7 @@ import type { ModelApi, ModelDefinitionConfig } from "../config/types.js";

export const OPENCODE_ZEN_API_BASE_URL = "https://opencode.ai/zen/v1";
export const OPENCODE_ZEN_DEFAULT_MODEL = "claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode-zen/${OPENCODE_ZEN_DEFAULT_MODEL}`;
export const OPENCODE_ZEN_DEFAULT_MODEL_REF = `opencode/${OPENCODE_ZEN_DEFAULT_MODEL}`;

// Cache for fetched models (1 hour TTL)
let cachedModels: ModelDefinitionConfig[] | null = null;
@@ -87,10 +87,23 @@ export function resolveOpencodeZenAlias(modelIdOrAlias: string): string {
}

/**
 * OpenCode Zen is an OpenAI-compatible proxy for all models.
 * All requests go through /chat/completions regardless of the underlying model.
 * OpenCode Zen routes models to different APIs based on model family.
 */
export function resolveOpencodeZenModelApi(_modelId: string): ModelApi {
export function resolveOpencodeZenModelApi(modelId: string): ModelApi {
  const lower = modelId.toLowerCase();
  if (
    lower.startsWith("claude-") ||
    lower.startsWith("minimax") ||
    lower.startsWith("alpha-gd4")
  ) {
    return "anthropic-messages";
  }
  if (lower.startsWith("gemini-")) {
    return "google-generative-ai";
  }
  if (lower.startsWith("gpt-")) {
    return "openai-responses";
  }
  return "openai-completions";
}

@@ -147,7 +147,7 @@ describe("applyAuthChoice", () => {
    expect(result.config.agents?.defaults?.model?.primary).toBe(
      "anthropic/claude-opus-4-5",
    );
    expect(result.config.models?.providers?.["opencode-zen"]).toBeDefined();
    expect(result.agentModelOverride).toBe("opencode-zen/claude-opus-4-5");
    expect(result.config.models?.providers?.["opencode-zen"]).toBeUndefined();
    expect(result.agentModelOverride).toBe("opencode/claude-opus-4-5");
  });
});

@@ -710,8 +710,8 @@ export async function applyAuthChoice(params: {
      });
      await setOpencodeZenApiKey(String(key).trim(), params.agentDir);
      nextConfig = applyAuthProfileConfig(nextConfig, {
        profileId: "opencode-zen:default",
        provider: "opencode-zen",
        profileId: "opencode:default",
        provider: "opencode",
        mode: "api_key",
      });
      if (params.setDefaultModel) {
@@ -755,7 +755,7 @@ export function resolvePreferredProviderForAuthChoice(
    case "minimax":
      return "lmstudio";
    case "opencode-zen":
      return "opencode-zen";
      return "opencode";
    default:
      return undefined;
  }

@@ -258,23 +258,10 @@ describe("applyMinimaxApiProviderConfig", () => {
});

describe("applyOpencodeZenProviderConfig", () => {
  it("adds opencode-zen provider with correct settings", () => {
    const cfg = applyOpencodeZenProviderConfig({});
    expect(cfg.models?.providers?.["opencode-zen"]).toMatchObject({
      baseUrl: "https://opencode.ai/zen/v1",
      apiKey: "opencode-zen",
      api: "openai-completions",
    });
    expect(
      cfg.models?.providers?.["opencode-zen"]?.models.length,
    ).toBeGreaterThan(0);
  });

  it("adds allowlist entries for fallback models", () => {
  it("adds allowlist entry for the default model", () => {
    const cfg = applyOpencodeZenProviderConfig({});
    const models = cfg.agents?.defaults?.models ?? {};
    expect(Object.keys(models)).toContain("opencode-zen/claude-opus-4-5");
    expect(Object.keys(models)).toContain("opencode-zen/gpt-5.2");
    expect(Object.keys(models)).toContain("opencode/claude-opus-4-5");
  });

  it("preserves existing alias for the default model", () => {
@@ -282,13 +269,13 @@ describe("applyOpencodeZenProviderConfig", () => {
      agents: {
        defaults: {
          models: {
            "opencode-zen/claude-opus-4-5": { alias: "My Opus" },
            "opencode/claude-opus-4-5": { alias: "My Opus" },
          },
        },
      },
    });
    expect(
      cfg.agents?.defaults?.models?.["opencode-zen/claude-opus-4-5"]?.alias,
      cfg.agents?.defaults?.models?.["opencode/claude-opus-4-5"]?.alias,
    ).toBe("My Opus");
  });
});
@@ -297,7 +284,7 @@ describe("applyOpencodeZenConfig", () => {
  it("sets correct primary model", () => {
    const cfg = applyOpencodeZenConfig({});
    expect(cfg.agents?.defaults?.model?.primary).toBe(
      "opencode-zen/claude-opus-4-5",
      "opencode/claude-opus-4-5",
    );
  });

@@ -1,11 +1,7 @@
import type { OAuthCredentials, OAuthProvider } from "@mariozechner/pi-ai";
import { resolveDefaultAgentDir } from "../agents/agent-scope.js";
import { upsertAuthProfile } from "../agents/auth-profiles.js";
import {
  getOpencodeZenStaticFallbackModels,
  OPENCODE_ZEN_API_BASE_URL,
  OPENCODE_ZEN_DEFAULT_MODEL_REF,
} from "../agents/opencode-zen-models.js";
import { OPENCODE_ZEN_DEFAULT_MODEL_REF } from "../agents/opencode-zen-models.js";
import type { ClawdbotConfig } from "../config/config.js";
import type { ModelDefinitionConfig } from "../config/types.js";

@@ -450,10 +446,10 @@ export async function applyMinimaxApiConfig(

export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
  upsertAuthProfile({
    profileId: "opencode-zen:default",
    profileId: "opencode:default",
    credential: {
      type: "api_key",
      provider: "opencode-zen",
      provider: "opencode",
      key,
    },
    agentDir: agentDir ?? resolveDefaultAgentDir(),
@@ -463,21 +459,8 @@ export async function setOpencodeZenApiKey(key: string, agentDir?: string) {

export function applyOpencodeZenProviderConfig(
  cfg: ClawdbotConfig,
): ClawdbotConfig {
  const opencodeModels = getOpencodeZenStaticFallbackModels();

  const providers = { ...cfg.models?.providers };
  providers["opencode-zen"] = {
    baseUrl: OPENCODE_ZEN_API_BASE_URL,
    apiKey: "opencode-zen",
    api: "openai-completions",
    models: opencodeModels,
  };

  // Use the built-in opencode provider from pi-ai; only seed the allowlist alias.
  const models = { ...cfg.agents?.defaults?.models };
  for (const model of opencodeModels) {
    const key = `opencode-zen/${model.id}`;
    models[key] = models[key] ?? {};
  }
  models[OPENCODE_ZEN_DEFAULT_MODEL_REF] = {
    ...models[OPENCODE_ZEN_DEFAULT_MODEL_REF],
    alias: models[OPENCODE_ZEN_DEFAULT_MODEL_REF]?.alias ?? "Opus",
@@ -492,10 +475,6 @@ export function applyOpencodeZenProviderConfig(
        models,
      },
    },
    models: {
      mode: cfg.models?.mode ?? "merge",
      providers,
    },
  };
}

@@ -337,11 +337,11 @@ export async function runNonInteractiveOnboarding(
    nextConfig = applyMinimaxConfig(nextConfig);
  } else if (authChoice === "opencode-zen") {
    const resolved = await resolveNonInteractiveApiKey({
      provider: "opencode-zen",
      provider: "opencode",
      cfg: baseConfig,
      flagValue: opts.opencodeZenApiKey,
      flagName: "--opencode-zen-api-key",
      envVar: "OPENCODE_ZEN_API_KEY",
      envVar: "OPENCODE_API_KEY (or OPENCODE_ZEN_API_KEY)",
      runtime,
    });
    if (!resolved) return;
@@ -349,8 +349,8 @@ export async function runNonInteractiveOnboarding(
      await setOpencodeZenApiKey(resolved.key);
    }
    nextConfig = applyAuthProfileConfig(nextConfig, {
      profileId: "opencode-zen:default",
      provider: "opencode-zen",
      profileId: "opencode:default",
      provider: "opencode",
      mode: "api_key",
    });
    nextConfig = applyOpencodeZenConfig(nextConfig);

@@ -7,7 +7,7 @@ import {
} from "./opencode-zen-model-default.js";

describe("applyOpencodeZenModelDefault", () => {
  it("sets opencode-zen default when model is unset", () => {
  it("sets opencode default when model is unset", () => {
    const cfg: ClawdbotConfig = { agents: { defaults: {} } };
    const applied = applyOpencodeZenModelDefault(cfg);
    expect(applied.changed).toBe(true);
@@ -36,6 +36,15 @@ describe("applyOpencodeZenModelDefault", () => {
    expect(applied.next).toEqual(cfg);
  });

  it("no-ops when already legacy opencode-zen default", () => {
    const cfg = {
      agents: { defaults: { model: "opencode-zen/claude-opus-4-5" } },
    } as ClawdbotConfig;
    const applied = applyOpencodeZenModelDefault(cfg);
    expect(applied.changed).toBe(false);
    expect(applied.next).toEqual(cfg);
  });

  it("preserves fallbacks when setting primary", () => {
    const cfg: ClawdbotConfig = {
      agents: {

@@ -1,7 +1,8 @@
import type { ClawdbotConfig } from "../config/config.js";
import type { AgentModelListConfig } from "../config/types.js";

export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";
export const OPENCODE_ZEN_DEFAULT_MODEL = "opencode/claude-opus-4-5";
const LEGACY_OPENCODE_ZEN_DEFAULT_MODEL = "opencode-zen/claude-opus-4-5";

function resolvePrimaryModel(
  model?: AgentModelListConfig | string,
@@ -18,7 +19,11 @@ export function applyOpencodeZenModelDefault(cfg: ClawdbotConfig): {
  changed: boolean;
} {
  const current = resolvePrimaryModel(cfg.agents?.defaults?.model)?.trim();
  if (current === OPENCODE_ZEN_DEFAULT_MODEL) {
  const normalizedCurrent =
    current === LEGACY_OPENCODE_ZEN_DEFAULT_MODEL
      ? OPENCODE_ZEN_DEFAULT_MODEL
      : current;
  if (normalizedCurrent === OPENCODE_ZEN_DEFAULT_MODEL) {
    return { next: cfg, changed: false };
  }