fix: refine custom endpoint detection (#11106) (thanks @MackDing)

This commit is contained in:
Gustavo Madeira Santana
2026-02-10 00:43:31 -05:00
parent 14c32d6223
commit ed20354abf
6 changed files with 422 additions and 145 deletions

View File

@@ -31,8 +31,8 @@ Flow notes:
- `quickstart`: minimal prompts, auto-generates a gateway token.
- `manual`: full prompts for port/bind/auth (alias of `advanced`).
- Fastest first chat: `openclaw dashboard` (Control UI, no channel setup).
- Custom API Endpoint: choose OpenAI-compatible or Anthropic-compatible endpoints
for self-hosted servers.
- Custom API Endpoint: connect any OpenAI- or Anthropic-compatible endpoint,
including hosted providers not listed here. Choose **Unknown** to auto-detect the API type.
## Common follow-up commands

View File

@@ -39,11 +39,12 @@ Use the OpenClaw app when you want a fully guided setup on macOS. Docs:
## Custom API Endpoint
If you run your own model server, choose **Custom API Endpoint** in the CLI
wizard. You will be asked to:
If you need an endpoint that is not listed, including hosted providers that
expose standard OpenAI or Anthropic APIs, choose **Custom API Endpoint** in the
CLI wizard. You will be asked to:
- Pick OpenAI-compatible or Anthropic-compatible endpoints.
- Enter a base URL and optional API key.
- Pick OpenAI-compatible, Anthropic-compatible, or **Unknown** (auto-detect).
- Enter a base URL and API key (if required by the provider).
- Provide a model ID and optional alias.
- Choose an Endpoint ID so multiple custom endpoints can coexist.

View File

@@ -63,7 +63,7 @@ The wizard starts with **QuickStart** (defaults) vs **Advanced** (full control).
**Local mode (default)** walks you through these steps:
1. **Model/Auth** — Anthropic API key (recommended), OpenAI, or Custom API Endpoint
(OpenAI-compatible or Anthropic-compatible). Pick a default model.
(OpenAI-compatible, Anthropic-compatible, or Unknown auto-detect). Pick a default model.
2. **Workspace** — Location for agent files (default `~/.openclaw/workspace`). Seeds bootstrap files.
3. **Gateway** — Port, bind address, auth mode, Tailscale exposure.
4. **Channels** — WhatsApp, Telegram, Discord, Google Chat, Mattermost, Signal, BlueBubbles, or iMessage.

View File

@@ -152,7 +152,7 @@ const AUTH_CHOICE_GROUP_DEFS: {
{
value: "custom",
label: "Custom API Endpoint",
hint: "Ollama, OpenAI/Anthropic-compatible endpoints",
hint: "Any OpenAI or Anthropic compatible endpoint",
choices: ["custom-api-key"],
},
];

View File

@@ -95,6 +95,99 @@ describe("promptCustomApiConfig", () => {
expect(prompter.confirm).toHaveBeenCalled();
});
it("detects openai compatibility when unknown", async () => {
const prompter = {
text: vi
.fn()
.mockResolvedValueOnce("https://example.com/v1") // Base URL
.mockResolvedValueOnce("test-key") // API Key
.mockResolvedValueOnce("detected-model") // Model ID
.mockResolvedValueOnce("custom") // Endpoint ID
.mockResolvedValueOnce("alias"), // Alias
progress: vi.fn(() => ({
update: vi.fn(),
stop: vi.fn(),
})),
select: vi.fn().mockResolvedValueOnce("unknown"),
confirm: vi.fn(),
note: vi.fn(),
};
vi.stubGlobal(
"fetch",
vi
.fn()
.mockResolvedValueOnce({
ok: true,
json: async () => ({ data: [] }),
})
.mockResolvedValueOnce({
ok: true,
json: async () => ({}),
}),
);
const result = await promptCustomApiConfig({
prompter: prompter as unknown as Parameters<typeof promptCustomApiConfig>[0]["prompter"],
runtime: { ...defaultRuntime, log: vi.fn() },
config: {},
});
expect(prompter.text).toHaveBeenCalledTimes(5);
expect(prompter.select).toHaveBeenCalledTimes(1);
expect(result.config.models?.providers?.custom?.api).toBe("openai-completions");
});
it("re-prompts base url when unknown detection fails", async () => {
const prompter = {
text: vi
.fn()
.mockResolvedValueOnce("https://bad.example.com/v1") // Base URL #1
.mockResolvedValueOnce("bad-key") // API Key #1
.mockResolvedValueOnce("bad-model") // Model ID #1
.mockResolvedValueOnce("https://ok.example.com/v1") // Base URL #2
.mockResolvedValueOnce("ok-key") // API Key #2
.mockResolvedValueOnce("ok-model") // Model ID #2
.mockResolvedValueOnce("custom") // Endpoint ID
.mockResolvedValueOnce(""), // Alias
progress: vi.fn(() => ({
update: vi.fn(),
stop: vi.fn(),
})),
select: vi.fn().mockResolvedValueOnce("unknown"),
confirm: vi.fn(),
note: vi.fn(),
};
vi.stubGlobal(
"fetch",
vi
.fn()
.mockResolvedValueOnce({
ok: true,
json: async () => ({ data: [] }),
})
.mockResolvedValueOnce({ ok: false, status: 404, json: async () => ({}) })
.mockResolvedValueOnce({ ok: false, status: 404, json: async () => ({}) })
.mockResolvedValueOnce({
ok: true,
json: async () => ({ data: [] }),
})
.mockResolvedValueOnce({ ok: true, json: async () => ({}) }),
);
await promptCustomApiConfig({
prompter: prompter as unknown as Parameters<typeof promptCustomApiConfig>[0]["prompter"],
runtime: { ...defaultRuntime, log: vi.fn() },
config: {},
});
expect(prompter.note).toHaveBeenCalledWith(
expect.stringContaining("did not respond"),
"Endpoint detection",
);
});
it("renames provider id when baseUrl differs", async () => {
const prompter = {
text: vi

View File

@@ -15,6 +15,7 @@ const DISCOVERY_TIMEOUT_MS = 5000;
const VERIFY_TIMEOUT_MS = 10000;
type CustomApiCompatibility = "openai" | "anthropic";
type CustomApiCompatibilityChoice = CustomApiCompatibility | "unknown";
type CustomApiResult = {
config: OpenClawConfig;
providerId?: string;
@@ -22,10 +23,10 @@ type CustomApiResult = {
};
const COMPATIBILITY_OPTIONS: Array<{
value: CustomApiCompatibility;
value: CustomApiCompatibilityChoice;
label: string;
hint: string;
api: "openai-completions" | "anthropic-messages";
api?: "openai-completions" | "anthropic-messages";
}> = [
{
value: "openai",
@@ -39,9 +40,14 @@ const COMPATIBILITY_OPTIONS: Array<{
hint: "Uses /messages",
api: "anthropic-messages",
},
{
value: "unknown",
label: "Unknown (detect automatically)",
hint: "Probes OpenAI then Anthropic endpoints",
},
];
function resolveBaseUrlDefaults(compatibility: CustomApiCompatibility) {
function resolveBaseUrlDefaults(compatibility: CustomApiCompatibilityChoice) {
if (compatibility === "anthropic") {
return {
initialValue: DEFAULT_ANTHROPIC_BASE_URL,
@@ -124,29 +130,181 @@ function resolveAliasError(params: {
return `Alias ${normalized} already points to ${existingKey}.`;
}
export async function promptCustomApiConfig(params: {
prompter: WizardPrompter;
runtime: RuntimeEnv;
config: OpenClawConfig;
}): Promise<CustomApiResult> {
const { prompter, runtime, config } = params;
/**
 * Builds HTTP headers for OpenAI-style requests.
 * A `Bearer` Authorization header is included only when an API key was given,
 * so key-less local servers receive no auth header at all.
 */
function buildOpenAiHeaders(apiKey: string) {
  const result: Record<string, string> = apiKey
    ? { Authorization: `Bearer ${apiKey}` }
    : {};
  return result;
}
const compatibility = await prompter.select({
message: "Endpoint compatibility",
options: COMPATIBILITY_OPTIONS.map((option) => ({
value: option.value,
label: option.label,
hint: option.hint,
})),
/**
 * Builds HTTP headers for Anthropic-style requests.
 * Always pins the `anthropic-version` the wizard was written against, and
 * adds `x-api-key` only when a key was provided.
 */
function buildAnthropicHeaders(apiKey: string) {
  const base: Record<string, string> = {
    "anthropic-version": "2023-06-01",
  };
  return apiKey ? { ...base, "x-api-key": apiKey } : base;
}
/**
 * Performs a `fetch` that is aborted after `timeoutMs` milliseconds.
 * The abort timer is always cleared — whether the request resolves,
 * rejects, or is aborted — so no stray timer keeps the process alive.
 */
async function fetchWithTimeout(
  url: string,
  init: RequestInit,
  timeoutMs: number,
): Promise<Response> {
  const aborter = new AbortController();
  const timer = setTimeout(() => aborter.abort(), timeoutMs);
  try {
    const response = await fetch(url, { ...init, signal: aborter.signal });
    return response;
  } finally {
    clearTimeout(timer);
  }
}
/**
 * Normalizes an OpenAI-style model listing into a flat array of model-id strings.
 * Accepts either `{ data: [...] }` (OpenAI) or `{ models: [...] }` (some
 * compatible servers). Entries may be plain strings or `{ id }` objects;
 * anything else is stringified as a last resort.
 *
 * Fix: removed two unreachable leftover statements after the `return`
 * (they referenced `compatibility`, which is not in scope here).
 */
function parseOpenAiModels(data: { data?: { id: string }[]; models?: { id: string }[] }) {
  const rawModels = data.data || data.models || [];
  return rawModels.map((m: unknown) => {
    if (typeof m === "string") {
      return m;
    }
    if (typeof m === "object" && m !== null && "id" in m) {
      return (m as { id: string }).id;
    }
    return String(m);
  });
}
const baseDefaults = resolveBaseUrlDefaults(compatibility);
const baseUrlInput = await prompter.text({
/**
 * Renders an arbitrary thrown/returned error value as a human-readable string.
 * Errors use their message, non-empty strings pass through, other falsy values
 * collapse to "unknown error", and anything else is JSON-encoded (falling back
 * to "unknown error" when serialization throws, e.g. on circular structures).
 */
function formatVerificationError(error: unknown): string {
  if (error instanceof Error) {
    return error.message;
  }
  if (typeof error === "string" && error) {
    return error;
  }
  if (!error) {
    return "unknown error";
  }
  try {
    return JSON.stringify(error);
  } catch {
    return "unknown error";
  }
}
// Probes an OpenAI-compatible `GET {baseUrl}/models` listing endpoint.
// Returns the discovered model ids, [] when the endpoint answered OK but
// listed nothing, or null when discovery failed (non-2xx response, network
// error, or timeout) so the caller can fall back to manual model entry.
async function tryDiscoverOpenAiModels(params: {
baseUrl: string;
apiKey: string;
prompter: WizardPrompter;
}): Promise<string[] | null> {
const { baseUrl, apiKey, prompter } = params;
// Spinner keeps the CLI responsive while the (possibly slow) endpoint is probed.
const spinner = prompter.progress("Connecting...");
spinner.update(`Scanning models at ${baseUrl}...`);
try {
// Ensure a trailing slash so URL resolution appends "models" instead of
// replacing the last path segment of the base URL.
const discoveryUrl = new URL("models", baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`).href;
const res = await fetchWithTimeout(
discoveryUrl,
{ headers: buildOpenAiHeaders(apiKey) },
DISCOVERY_TIMEOUT_MS,
);
if (res.ok) {
const data = (await res.json()) as { data?: { id: string }[]; models?: { id: string }[] };
const models = parseOpenAiModels(data);
if (models.length > 0) {
spinner.stop(`Found ${models.length} models.`);
return models;
}
// Reachable endpoint with an empty listing: distinct from failure (null).
spinner.stop("Connected, but no models list returned.");
return [];
}
spinner.stop(`Connection succeeded, but discovery failed (${res.status}).`);
return null;
} catch {
// Network error, JSON parse failure, or abort-on-timeout: treat as "could not detect".
spinner.stop("Could not auto-detect models.");
return null;
}
}
// Outcome of a single verification POST: `ok` mirrors Response.ok,
// `status` is set when an HTTP response arrived, and `error` holds the
// thrown value when the request failed before a response was received.
type VerificationResult = {
ok: boolean;
status?: number;
error?: unknown;
};
/**
 * Sends a minimal chat-completions request to confirm the endpoint speaks the
 * OpenAI API. Resolves with the HTTP status when a response arrives, or with
 * the thrown error on network failure/timeout — it never rejects.
 */
async function requestOpenAiVerification(params: {
  baseUrl: string;
  apiKey: string;
  modelId: string;
}): Promise<VerificationResult> {
  const { baseUrl, apiKey, modelId } = params;
  const root = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`;
  const endpoint = new URL("chat/completions", root).href;
  const payload = {
    model: modelId,
    messages: [{ role: "user", content: "Hi" }],
    max_tokens: 5,
  };
  try {
    const response = await fetchWithTimeout(
      endpoint,
      {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...buildOpenAiHeaders(apiKey),
        },
        body: JSON.stringify(payload),
      },
      VERIFY_TIMEOUT_MS,
    );
    return { ok: response.ok, status: response.status };
  } catch (error) {
    return { ok: false, error };
  }
}
/**
 * Sends a minimal /messages request to confirm the endpoint speaks the
 * Anthropic API. Resolves with the HTTP status when a response arrives, or
 * with the thrown error on network failure/timeout — it never rejects.
 */
async function requestAnthropicVerification(params: {
  baseUrl: string;
  apiKey: string;
  modelId: string;
}): Promise<VerificationResult> {
  const { baseUrl, apiKey, modelId } = params;
  const root = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`;
  const endpoint = new URL("messages", root).href;
  const payload = {
    model: modelId,
    max_tokens: 16,
    messages: [{ role: "user", content: "Hi" }],
  };
  try {
    const response = await fetchWithTimeout(
      endpoint,
      {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          ...buildAnthropicHeaders(apiKey),
        },
        body: JSON.stringify(payload),
      },
      VERIFY_TIMEOUT_MS,
    );
    return { ok: response.ok, status: response.status };
  } catch (error) {
    return { ok: false, error };
  }
}
async function promptBaseUrlAndKey(params: {
prompter: WizardPrompter;
compatibility: CustomApiCompatibilityChoice;
initialBaseUrl?: string;
}): Promise<{ baseUrl: string; apiKey: string }> {
const defaults = resolveBaseUrlDefaults(params.compatibility);
const baseUrlInput = await params.prompter.text({
message: "API Base URL",
initialValue: baseDefaults.initialValue,
placeholder: baseDefaults.placeholder,
initialValue: params.initialBaseUrl ?? defaults.initialValue,
placeholder: defaults.placeholder,
validate: (val) => {
try {
new URL(val);
@@ -156,14 +314,110 @@ export async function promptCustomApiConfig(params: {
}
},
});
const baseUrl = baseUrlInput.trim();
const apiKeyInput = await prompter.text({
message: "API Key (optional for local)",
const apiKeyInput = await params.prompter.text({
message: "API Key (optional)",
placeholder: "sk-...",
initialValue: "",
});
const apiKey = apiKeyInput.trim();
return { baseUrl: baseUrlInput.trim(), apiKey: apiKeyInput.trim() };
}
export async function promptCustomApiConfig(params: {
prompter: WizardPrompter;
runtime: RuntimeEnv;
config: OpenClawConfig;
}): Promise<CustomApiResult> {
const { prompter, runtime, config } = params;
const compatibilityChoice = await prompter.select({
message: "Endpoint compatibility",
options: COMPATIBILITY_OPTIONS.map((option) => ({
value: option.value,
label: option.label,
hint: option.hint,
})),
});
let compatibility: CustomApiCompatibility | null =
compatibilityChoice === "unknown" ? null : compatibilityChoice;
let providerApi =
COMPATIBILITY_OPTIONS.find((entry) => entry.value === compatibility)?.api ??
"openai-completions";
let baseUrl = "";
let apiKey = "";
let modelId: string | undefined;
let discoveredModels: string[] | null = null;
let verifiedFromProbe = false;
if (compatibilityChoice === "unknown") {
let lastBaseUrl: string | undefined;
while (!compatibility) {
const baseInput = await promptBaseUrlAndKey({
prompter,
compatibility: compatibilityChoice,
initialBaseUrl: lastBaseUrl,
});
baseUrl = baseInput.baseUrl;
apiKey = baseInput.apiKey;
const models = await tryDiscoverOpenAiModels({ baseUrl, apiKey, prompter });
if (models && models.length > 0) {
compatibility = "openai";
providerApi = "openai-completions";
discoveredModels = models;
break;
}
modelId = (
await prompter.text({
message: "Model ID",
placeholder: "e.g. llama3, claude-3-7-sonnet",
validate: (val) => (val.trim() ? undefined : "Model ID is required"),
})
).trim();
const probeSpinner = prompter.progress("Detecting endpoint type...");
const openaiProbe = await requestOpenAiVerification({ baseUrl, apiKey, modelId });
if (openaiProbe.ok) {
probeSpinner.stop("Detected OpenAI-compatible endpoint.");
compatibility = "openai";
providerApi = "openai-completions";
verifiedFromProbe = true;
break;
}
const anthropicProbe = await requestAnthropicVerification({ baseUrl, apiKey, modelId });
if (anthropicProbe.ok) {
probeSpinner.stop("Detected Anthropic-compatible endpoint.");
compatibility = "anthropic";
providerApi = "anthropic-messages";
verifiedFromProbe = true;
break;
}
probeSpinner.stop("Could not detect endpoint type.");
await prompter.note(
"This endpoint did not respond to OpenAI or Anthropic style requests. Enter a new base URL and try again.",
"Endpoint detection",
);
lastBaseUrl = baseUrl;
modelId = undefined;
}
} else {
const baseInput = await promptBaseUrlAndKey({
prompter,
compatibility: compatibilityChoice,
});
baseUrl = baseInput.baseUrl;
apiKey = baseInput.apiKey;
compatibility = compatibilityChoice;
providerApi =
COMPATIBILITY_OPTIONS.find((entry) => entry.value === compatibility)?.api ??
"openai-completions";
}
if (!compatibility) {
return { config };
}
const providers = config.models?.providers ?? {};
const suggestedId = buildEndpointIdFromUrl(baseUrl);
@@ -192,76 +446,39 @@ export async function promptCustomApiConfig(params: {
}
const providerId = providerIdResult.providerId;
let modelId: string | undefined;
if (compatibility === "openai") {
const spinner = prompter.progress("Connecting...");
spinner.update(`Scanning models at ${baseUrl}...`);
try {
const discoveryUrl = new URL("models", baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`).href;
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), DISCOVERY_TIMEOUT_MS);
const headers: Record<string, string> = {};
if (apiKey) {
headers.Authorization = `Bearer ${apiKey}`;
}
const res = await fetch(discoveryUrl, {
headers,
signal: controller.signal,
});
clearTimeout(timeout);
if (res.ok) {
const data = (await res.json()) as { data?: { id: string }[]; models?: { id: string }[] };
const rawModels = data.data || data.models || [];
const models = rawModels.map((m: unknown) => {
if (typeof m === "string") {
return m;
}
if (typeof m === "object" && m !== null && "id" in m) {
return (m as { id: string }).id;
}
return String(m);
});
if (models.length > 0) {
spinner.stop(`Found ${models.length} models.`);
const selection = await prompter.select({
message: "Select a model",
options: [
...models.map((id: string) => ({ value: id, label: id })),
{ value: "__manual", label: "(Enter manually...)" },
],
});
if (selection !== "__manual") {
modelId = selection;
}
} else {
spinner.stop("Connected, but no models list returned.");
}
} else {
spinner.stop(`Connection succeeded, but discovery failed (${res.status}).`);
}
} catch {
spinner.stop("Could not auto-detect models.");
}
} else {
await prompter.note(
"Anthropic-compatible endpoints do not expose a standard models endpoint. Please enter a model ID manually.",
"Model discovery",
);
if (compatibility === "openai" && !discoveredModels) {
discoveredModels = await tryDiscoverOpenAiModels({ baseUrl, apiKey, prompter });
}
if (!modelId) {
modelId = await prompter.text({
message: "Model ID",
placeholder: "e.g. llama3, claude-3-7-sonnet",
validate: (val) => (val.trim() ? undefined : "Model ID is required"),
});
if (compatibility === "openai" && discoveredModels && discoveredModels.length > 0) {
const selection = await prompter.select({
message: "Select a model",
options: [
...discoveredModels.map((id) => ({ value: id, label: id })),
{ value: "__manual", label: "(Enter manually...)" },
],
});
if (selection !== "__manual") {
modelId = selection;
}
} else if (compatibility === "anthropic") {
await prompter.note(
"Anthropic-compatible endpoints do not expose a standard models endpoint. Please enter a model ID manually.",
"Model discovery",
);
}
}
if (!modelId) {
modelId = (
await prompter.text({
message: "Model ID",
placeholder: "e.g. llama3, claude-3-7-sonnet",
validate: (val) => (val.trim() ? undefined : "Model ID is required"),
})
).trim();
}
modelId = modelId.trim();
const modelRef = modelKey(providerId, modelId);
const aliasInput = await prompter.text({
@@ -272,55 +489,21 @@ export async function promptCustomApiConfig(params: {
});
const alias = aliasInput.trim();
const verifySpinner = prompter.progress("Verifying...");
let verified = false;
try {
const endpoint = compatibility === "anthropic" ? "messages" : "chat/completions";
const verifyUrl = new URL(endpoint, baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`).href;
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), VERIFY_TIMEOUT_MS);
const headers: Record<string, string> = {
"Content-Type": "application/json",
};
if (compatibility === "anthropic") {
headers["anthropic-version"] = "2023-06-01";
if (apiKey) {
headers["x-api-key"] = apiKey;
}
} else if (apiKey) {
headers.Authorization = `Bearer ${apiKey}`;
}
const body =
let verified = verifiedFromProbe;
if (!verified) {
const verifySpinner = prompter.progress("Verifying...");
const result =
compatibility === "anthropic"
? {
model: modelId,
max_tokens: 16,
messages: [{ role: "user", content: "Hi" }],
}
: {
model: modelId,
messages: [{ role: "user", content: "Hi" }],
max_tokens: 5,
};
const res = await fetch(verifyUrl, {
method: "POST",
headers,
body: JSON.stringify(body),
signal: controller.signal,
});
clearTimeout(timeout);
if (res.ok) {
? await requestAnthropicVerification({ baseUrl, apiKey, modelId })
: await requestOpenAiVerification({ baseUrl, apiKey, modelId });
if (result.ok) {
verified = true;
verifySpinner.stop("Verification successful.");
} else if (result.status !== undefined) {
verifySpinner.stop(`Verification failed: status ${result.status}`);
} else {
verifySpinner.stop(`Verification failed: status ${res.status}`);
verifySpinner.stop(`Verification failed: ${formatVerificationError(result.error)}`);
}
} catch (err) {
verifySpinner.stop(`Verification failed: ${String(err)}`);
}
if (!verified) {