Mirror of https://github.com/openclaw/openclaw.git, synced 2026-02-19 18:39:20 -05:00
fix: improve error for unconfigured local providers (ollama/vllm) (#17328)
When a user sets `agents.defaults.model.primary: "ollama/gemma3:4b"`
but forgets to set OLLAMA_API_KEY, the error is a confusing
"Unknown model: ollama/gemma3:4b". The Ollama provider needs a dummy
API key in order to be registered (the local server never checks its
value), but nothing in the error says so.
Add `buildUnknownModelError()` that detects known local providers
(ollama, vllm) and appends an actionable hint with the env var name
and a link to the relevant docs page.
Before: Unknown model: ollama/gemma3:4b
After: Unknown model: ollama/gemma3:4b. Ollama requires authentication
to be registered as a provider. Set OLLAMA_API_KEY="ollama-local"
(any value works) or run "openclaw configure".
See: https://docs.openclaw.ai/providers/ollama
Closes #17328
committed by Peter Steinberger
parent 6e1edc7d62
commit 4df970d711
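
For context, a minimal sketch of how a caller might surface the richer error. It is not part of this commit: the three-argument resolveModel call and the shape of its result are taken from the tests below, while the import path and the loadPrimaryModel wrapper are illustrative assumptions.

import { resolveModel } from "./resolve-model"; // hypothetical module path

function loadPrimaryModel(provider: string, modelId: string, agentDir: string) {
  const result = resolveModel(provider, modelId, agentDir);
  if (!result.model) {
    // After this change, result.error already carries the OLLAMA_API_KEY /
    // VLLM_API_KEY hint for local providers, so it can be shown verbatim.
    throw new Error(result.error);
  }
  return result.model;
}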
@@ -340,4 +340,29 @@ describe("resolveModel", () => {
       },
     });
   });
+
+  it("includes auth hint for unknown ollama models (#17328)", () => {
+    // resetMockDiscoverModels() in beforeEach already sets find → null
+    const result = resolveModel("ollama", "gemma3:4b", "/tmp/agent");
+
+    expect(result.model).toBeUndefined();
+    expect(result.error).toContain("Unknown model: ollama/gemma3:4b");
+    expect(result.error).toContain("OLLAMA_API_KEY");
+    expect(result.error).toContain("docs.openclaw.ai/providers/ollama");
+  });
+
+  it("includes auth hint for unknown vllm models", () => {
+    const result = resolveModel("vllm", "llama-3-70b", "/tmp/agent");
+
+    expect(result.model).toBeUndefined();
+    expect(result.error).toContain("Unknown model: vllm/llama-3-70b");
+    expect(result.error).toContain("VLLM_API_KEY");
+  });
+
+  it("does not add auth hint for non-local providers", () => {
+    const result = resolveModel("google-antigravity", "some-model", "/tmp/agent");
+
+    expect(result.model).toBeUndefined();
+    expect(result.error).toBe("Unknown model: google-antigravity/some-model");
+  });
 });
@@ -93,10 +93,38 @@ export function resolveModel(
       return { model: fallbackModel, authStorage, modelRegistry };
     }
     return {
-      error: `Unknown model: ${provider}/${modelId}`,
+      error: buildUnknownModelError(provider, modelId),
       authStorage,
       modelRegistry,
     };
   }
   return { model: normalizeModelCompat(model), authStorage, modelRegistry };
 }
+
+/**
+ * Build a more helpful error when the model is not found.
+ *
+ * Local providers (ollama, vllm) need a dummy API key to be registered.
+ * Users often configure `agents.defaults.model.primary: "ollama/…"` but
+ * forget to set `OLLAMA_API_KEY`, resulting in a confusing "Unknown model"
+ * error. This detects known providers that require opt-in auth and adds
+ * a hint.
+ *
+ * See: https://github.com/openclaw/openclaw/issues/17328
+ */
+const LOCAL_PROVIDER_HINTS: Record<string, string> = {
+  ollama:
+    "Ollama requires authentication to be registered as a provider. " +
+    'Set OLLAMA_API_KEY="ollama-local" (any value works) or run "openclaw configure". ' +
+    "See: https://docs.openclaw.ai/providers/ollama",
+  vllm:
+    "vLLM requires authentication to be registered as a provider. " +
+    'Set VLLM_API_KEY (any value works) or run "openclaw configure". ' +
+    "See: https://docs.openclaw.ai/providers/vllm",
+};
+
+function buildUnknownModelError(provider: string, modelId: string): string {
+  const base = `Unknown model: ${provider}/${modelId}`;
+  const hint = LOCAL_PROVIDER_HINTS[provider.toLowerCase()];
+  return hint ? `${base}. ${hint}` : base;
+}
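
Design note: the hint table is a plain record keyed by the lowercased provider name, so supporting another local provider is a one-entry change. A hypothetical sketch of such an entry follows; the "lmstudio" provider, its env var, and the wording are invented for illustration and are not part of this commit.

// Hypothetical: how a third local provider could be hinted. Only ollama and
// vllm exist in this commit; "lmstudio" and LMSTUDIO_API_KEY are invented.
// Keys stay lowercase because buildUnknownModelError lowercases the provider
// name before the lookup.
const LOCAL_PROVIDER_HINTS: Record<string, string> = {
  // ...ollama and vllm entries as above...
  lmstudio:
    "LM Studio requires authentication to be registered as a provider. " +
    'Set LMSTUDIO_API_KEY (any value works) or run "openclaw configure".',
};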