refactor(test): share models-config e2e setup

This commit is contained in:
Peter Steinberger
2026-02-14 21:04:53 +00:00
parent 5f55a53f0e
commit 96f80d6d82
6 changed files with 164 additions and 402 deletions

View File

@@ -1,53 +1,15 @@
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import { describe, expect, it, vi } from "vitest";
import {
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
return withTempHomeBase(fn, { prefix: "openclaw-models-" });
}
const _MODELS_CONFIG: OpenClawConfig = {
models: {
providers: {
"custom-proxy": {
baseUrl: "http://localhost:4000/v1",
apiKey: "TEST_KEY",
api: "openai-completions",
models: [
{
id: "llama-3.1-8b",
name: "Llama 3.1 8B (Proxy)",
api: "openai-completions",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 32000,
},
],
},
},
},
};
installModelsConfigTestHooks({ restoreFetch: true });
describe("models-config", () => {
let previousHome: string | undefined;
const originalFetch = globalThis.fetch;
beforeEach(() => {
previousHome = process.env.HOME;
});
afterEach(() => {
process.env.HOME = previousHome;
if (originalFetch) {
globalThis.fetch = originalFetch;
}
});
it("auto-injects github-copilot provider when token is present", async () => {
await withTempHome(async (home) => {
const previous = process.env.COPILOT_GITHUB_TOKEN;
@@ -74,7 +36,11 @@ describe("models-config", () => {
expect(parsed.providers["github-copilot"]?.baseUrl).toBe("https://api.copilot.example");
expect(parsed.providers["github-copilot"]?.models?.length ?? 0).toBe(0);
} finally {
process.env.COPILOT_GITHUB_TOKEN = previous;
if (previous === undefined) {
delete process.env.COPILOT_GITHUB_TOKEN;
} else {
process.env.COPILOT_GITHUB_TOKEN = previous;
}
}
});
});

View File

@@ -0,0 +1,104 @@
import { afterEach, beforeEach } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
/**
 * Runs `fn` inside a temporary HOME directory dedicated to the
 * models-config e2e suites, using a recognizable directory prefix
 * so leftover temp dirs are easy to attribute.
 */
export async function withModelsTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
  const options = { prefix: "openclaw-models-" };
  return await withTempHomeBase(fn, options);
}
/**
 * Registers vitest beforeEach/afterEach hooks that snapshot and restore
 * `process.env.HOME` (and, optionally, `globalThis.fetch`) around each test.
 *
 * @param opts.restoreFetch - when true, reinstate the original global `fetch`
 *   after each test, for suites that replace it with a stub.
 */
export function installModelsConfigTestHooks(opts?: { restoreFetch?: boolean }) {
  let previousHome: string | undefined;
  const originalFetch = globalThis.fetch;
  beforeEach(() => {
    previousHome = process.env.HOME;
  });
  afterEach(() => {
    // Assigning `undefined` to a process.env key coerces it to the string
    // "undefined" in Node; delete the key instead when HOME was unset.
    if (previousHome === undefined) {
      delete process.env.HOME;
    } else {
      process.env.HOME = previousHome;
    }
    if (opts?.restoreFetch && originalFetch) {
      globalThis.fetch = originalFetch;
    }
  });
}
/**
 * Snapshots the named environment variables, runs `fn`, and restores every
 * variable to its prior state afterwards — re-deleting any that were unset
 * before the call, even if `fn` throws.
 */
export async function withTempEnv<T>(vars: string[], fn: () => Promise<T>): Promise<T> {
  const snapshot = new Map<string, string | undefined>(
    vars.map((name) => [name, process.env[name]]),
  );
  try {
    return await fn();
  } finally {
    for (const [name, value] of snapshot) {
      if (value === undefined) {
        delete process.env[name];
      } else {
        process.env[name] = value;
      }
    }
  }
}
/** Deletes each named variable from `process.env`. */
export function unsetEnv(vars: string[]) {
  vars.forEach((name) => {
    delete process.env[name];
  });
}
// Env vars that can implicitly enable provider discovery in models-config.
// Tests clear these (via withTempEnv/unsetEnv) so ambient developer
// credentials don't leak into test results.
export const MODELS_CONFIG_IMPLICIT_ENV_VARS = [
  "CLOUDFLARE_AI_GATEWAY_API_KEY",
  "COPILOT_GITHUB_TOKEN",
  "GH_TOKEN",
  "GITHUB_TOKEN",
  "HF_TOKEN",
  "HUGGINGFACE_HUB_TOKEN",
  "MINIMAX_API_KEY",
  "MOONSHOT_API_KEY",
  "NVIDIA_API_KEY",
  "OLLAMA_API_KEY",
  "OPENCLAW_AGENT_DIR",
  "PI_CODING_AGENT_DIR",
  "QIANFAN_API_KEY",
  "SYNTHETIC_API_KEY",
  "TOGETHER_API_KEY",
  "VENICE_API_KEY",
  "VLLM_API_KEY",
  "XIAOMI_API_KEY",
  // Avoid ambient AWS creds unintentionally enabling Bedrock discovery.
  "AWS_ACCESS_KEY_ID",
  "AWS_CONFIG_FILE",
  "AWS_BEARER_TOKEN_BEDROCK",
  "AWS_DEFAULT_REGION",
  "AWS_PROFILE",
  "AWS_REGION",
  "AWS_SESSION_TOKEN",
  "AWS_SECRET_ACCESS_KEY",
  "AWS_SHARED_CREDENTIALS_FILE",
];
// Shared fixture: a single OpenAI-compatible "custom-proxy" provider with one
// zero-cost model, passed to ensureOpenClawModelsJson by the e2e tests that
// exercise explicit (non-discovered) provider configuration.
export const CUSTOM_PROXY_MODELS_CONFIG: OpenClawConfig = {
  models: {
    providers: {
      "custom-proxy": {
        baseUrl: "http://localhost:4000/v1",
        apiKey: "TEST_KEY",
        api: "openai-completions",
        models: [
          {
            id: "llama-3.1-8b",
            name: "Llama 3.1 8B (Proxy)",
            api: "openai-completions",
            reasoning: false,
            input: ["text"],
            cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
            contextWindow: 128000,
            maxTokens: 32000,
          },
        ],
      },
    },
  },
};

View File

@@ -1,54 +1,16 @@
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import { describe, expect, it, vi } from "vitest";
import { DEFAULT_COPILOT_API_BASE_URL } from "../providers/github-copilot-token.js";
import {
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
return withTempHomeBase(fn, { prefix: "openclaw-models-" });
}
const _MODELS_CONFIG: OpenClawConfig = {
models: {
providers: {
"custom-proxy": {
baseUrl: "http://localhost:4000/v1",
apiKey: "TEST_KEY",
api: "openai-completions",
models: [
{
id: "llama-3.1-8b",
name: "Llama 3.1 8B (Proxy)",
api: "openai-completions",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 32000,
},
],
},
},
},
};
installModelsConfigTestHooks({ restoreFetch: true });
describe("models-config", () => {
let previousHome: string | undefined;
const originalFetch = globalThis.fetch;
beforeEach(() => {
previousHome = process.env.HOME;
});
afterEach(() => {
process.env.HOME = previousHome;
if (originalFetch) {
globalThis.fetch = originalFetch;
}
});
it("falls back to default baseUrl when token exchange fails", async () => {
await withTempHome(async () => {
const previous = process.env.COPILOT_GITHUB_TOKEN;
@@ -71,7 +33,11 @@ describe("models-config", () => {
expect(parsed.providers["github-copilot"]?.baseUrl).toBe(DEFAULT_COPILOT_API_BASE_URL);
} finally {
process.env.COPILOT_GITHUB_TOKEN = previous;
if (previous === undefined) {
delete process.env.COPILOT_GITHUB_TOKEN;
} else {
process.env.COPILOT_GITHUB_TOKEN = previous;
}
}
});
});

View File

@@ -1,50 +1,18 @@
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
CUSTOM_PROXY_MODELS_CONFIG,
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
return withTempHomeBase(fn, { prefix: "openclaw-models-" });
}
const MODELS_CONFIG: OpenClawConfig = {
models: {
providers: {
"custom-proxy": {
baseUrl: "http://localhost:4000/v1",
apiKey: "TEST_KEY",
api: "openai-completions",
models: [
{
id: "llama-3.1-8b",
name: "Llama 3.1 8B (Proxy)",
api: "openai-completions",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 32000,
},
],
},
},
},
};
installModelsConfigTestHooks();
describe("models-config", () => {
let previousHome: string | undefined;
beforeEach(() => {
previousHome = process.env.HOME;
});
afterEach(() => {
process.env.HOME = previousHome;
});
it("fills missing provider.apiKey from env var name when models exist", async () => {
await withTempHome(async () => {
const prevKey = process.env.MINIMAX_API_KEY;
@@ -125,7 +93,7 @@ describe("models-config", () => {
"utf8",
);
await ensureOpenClawModelsJson(MODELS_CONFIG);
await ensureOpenClawModelsJson(CUSTOM_PROXY_MODELS_CONFIG);
const raw = await fs.readFile(path.join(agentDir, "models.json"), "utf8");
const parsed = JSON.parse(raw) as {

View File

@@ -1,110 +1,30 @@
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import { describe, expect, it } from "vitest";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
CUSTOM_PROXY_MODELS_CONFIG,
installModelsConfigTestHooks,
MODELS_CONFIG_IMPLICIT_ENV_VARS,
unsetEnv,
withTempEnv,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
return withTempHomeBase(fn, { prefix: "openclaw-models-" });
}
const MODELS_CONFIG: OpenClawConfig = {
models: {
providers: {
"custom-proxy": {
baseUrl: "http://localhost:4000/v1",
apiKey: "TEST_KEY",
api: "openai-completions",
models: [
{
id: "llama-3.1-8b",
name: "Llama 3.1 8B (Proxy)",
api: "openai-completions",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 32000,
},
],
},
},
},
};
installModelsConfigTestHooks();
describe("models-config", () => {
let previousHome: string | undefined;
beforeEach(() => {
previousHome = process.env.HOME;
});
afterEach(() => {
process.env.HOME = previousHome;
});
it("skips writing models.json when no env token or profile exists", async () => {
await withTempHome(async (home) => {
const previous = process.env.COPILOT_GITHUB_TOKEN;
const previousGh = process.env.GH_TOKEN;
const previousGithub = process.env.GITHUB_TOKEN;
const previousKimiCode = process.env.KIMI_API_KEY;
const previousMinimax = process.env.MINIMAX_API_KEY;
const previousMoonshot = process.env.MOONSHOT_API_KEY;
const previousSynthetic = process.env.SYNTHETIC_API_KEY;
const previousVenice = process.env.VENICE_API_KEY;
const previousXiaomi = process.env.XIAOMI_API_KEY;
const previousOllama = process.env.OLLAMA_API_KEY;
const previousVllm = process.env.VLLM_API_KEY;
const previousTogether = process.env.TOGETHER_API_KEY;
const previousHuggingfaceHub = process.env.HUGGINGFACE_HUB_TOKEN;
const previousHuggingfaceHf = process.env.HF_TOKEN;
const previousQianfan = process.env.QIANFAN_API_KEY;
const previousNvidia = process.env.NVIDIA_API_KEY;
const previousAwsAccessKeyId = process.env.AWS_ACCESS_KEY_ID;
const previousAwsSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
const previousAwsSessionToken = process.env.AWS_SESSION_TOKEN;
const previousAwsProfile = process.env.AWS_PROFILE;
const previousAwsRegion = process.env.AWS_REGION;
const previousAwsDefaultRegion = process.env.AWS_DEFAULT_REGION;
const previousAwsSharedCredentials = process.env.AWS_SHARED_CREDENTIALS_FILE;
const previousAwsConfigFile = process.env.AWS_CONFIG_FILE;
const previousAgentDir = process.env.OPENCLAW_AGENT_DIR;
const previousPiAgentDir = process.env.PI_CODING_AGENT_DIR;
delete process.env.COPILOT_GITHUB_TOKEN;
delete process.env.GH_TOKEN;
delete process.env.GITHUB_TOKEN;
delete process.env.KIMI_API_KEY;
delete process.env.MINIMAX_API_KEY;
delete process.env.MOONSHOT_API_KEY;
delete process.env.SYNTHETIC_API_KEY;
delete process.env.VENICE_API_KEY;
delete process.env.XIAOMI_API_KEY;
delete process.env.OLLAMA_API_KEY;
delete process.env.VLLM_API_KEY;
delete process.env.TOGETHER_API_KEY;
delete process.env.HUGGINGFACE_HUB_TOKEN;
delete process.env.HF_TOKEN;
delete process.env.QIANFAN_API_KEY;
delete process.env.NVIDIA_API_KEY;
delete process.env.AWS_ACCESS_KEY_ID;
delete process.env.AWS_SECRET_ACCESS_KEY;
delete process.env.AWS_SESSION_TOKEN;
delete process.env.AWS_PROFILE;
delete process.env.AWS_REGION;
delete process.env.AWS_DEFAULT_REGION;
delete process.env.AWS_SHARED_CREDENTIALS_FILE;
delete process.env.AWS_CONFIG_FILE;
delete process.env.OPENCLAW_AGENT_DIR;
delete process.env.PI_CODING_AGENT_DIR;
await withTempEnv([...MODELS_CONFIG_IMPLICIT_ENV_VARS, "KIMI_API_KEY"], async () => {
unsetEnv([...MODELS_CONFIG_IMPLICIT_ENV_VARS, "KIMI_API_KEY"]);
try {
const agentDir = path.join(home, "agent-empty");
// Avoid merging in the user's real main auth store via OPENCLAW_AGENT_DIR.
// ensureAuthProfileStore merges the main auth store into non-main dirs; point main at our temp dir.
process.env.OPENCLAW_AGENT_DIR = agentDir;
process.env.PI_CODING_AGENT_DIR = agentDir;
const result = await ensureOpenClawModelsJson(
{
models: { providers: {} },
@@ -114,143 +34,13 @@ describe("models-config", () => {
await expect(fs.stat(path.join(agentDir, "models.json"))).rejects.toThrow();
expect(result.wrote).toBe(false);
} finally {
if (previous === undefined) {
delete process.env.COPILOT_GITHUB_TOKEN;
} else {
process.env.COPILOT_GITHUB_TOKEN = previous;
}
if (previousGh === undefined) {
delete process.env.GH_TOKEN;
} else {
process.env.GH_TOKEN = previousGh;
}
if (previousGithub === undefined) {
delete process.env.GITHUB_TOKEN;
} else {
process.env.GITHUB_TOKEN = previousGithub;
}
if (previousKimiCode === undefined) {
delete process.env.KIMI_API_KEY;
} else {
process.env.KIMI_API_KEY = previousKimiCode;
}
if (previousMinimax === undefined) {
delete process.env.MINIMAX_API_KEY;
} else {
process.env.MINIMAX_API_KEY = previousMinimax;
}
if (previousMoonshot === undefined) {
delete process.env.MOONSHOT_API_KEY;
} else {
process.env.MOONSHOT_API_KEY = previousMoonshot;
}
if (previousSynthetic === undefined) {
delete process.env.SYNTHETIC_API_KEY;
} else {
process.env.SYNTHETIC_API_KEY = previousSynthetic;
}
if (previousVenice === undefined) {
delete process.env.VENICE_API_KEY;
} else {
process.env.VENICE_API_KEY = previousVenice;
}
if (previousXiaomi === undefined) {
delete process.env.XIAOMI_API_KEY;
} else {
process.env.XIAOMI_API_KEY = previousXiaomi;
}
if (previousOllama === undefined) {
delete process.env.OLLAMA_API_KEY;
} else {
process.env.OLLAMA_API_KEY = previousOllama;
}
if (previousVllm === undefined) {
delete process.env.VLLM_API_KEY;
} else {
process.env.VLLM_API_KEY = previousVllm;
}
if (previousTogether === undefined) {
delete process.env.TOGETHER_API_KEY;
} else {
process.env.TOGETHER_API_KEY = previousTogether;
}
if (previousHuggingfaceHub === undefined) {
delete process.env.HUGGINGFACE_HUB_TOKEN;
} else {
process.env.HUGGINGFACE_HUB_TOKEN = previousHuggingfaceHub;
}
if (previousHuggingfaceHf === undefined) {
delete process.env.HF_TOKEN;
} else {
process.env.HF_TOKEN = previousHuggingfaceHf;
}
if (previousQianfan === undefined) {
delete process.env.QIANFAN_API_KEY;
} else {
process.env.QIANFAN_API_KEY = previousQianfan;
}
if (previousNvidia === undefined) {
delete process.env.NVIDIA_API_KEY;
} else {
process.env.NVIDIA_API_KEY = previousNvidia;
}
if (previousAwsAccessKeyId === undefined) {
delete process.env.AWS_ACCESS_KEY_ID;
} else {
process.env.AWS_ACCESS_KEY_ID = previousAwsAccessKeyId;
}
if (previousAwsSecretAccessKey === undefined) {
delete process.env.AWS_SECRET_ACCESS_KEY;
} else {
process.env.AWS_SECRET_ACCESS_KEY = previousAwsSecretAccessKey;
}
if (previousAwsSessionToken === undefined) {
delete process.env.AWS_SESSION_TOKEN;
} else {
process.env.AWS_SESSION_TOKEN = previousAwsSessionToken;
}
if (previousAwsProfile === undefined) {
delete process.env.AWS_PROFILE;
} else {
process.env.AWS_PROFILE = previousAwsProfile;
}
if (previousAwsRegion === undefined) {
delete process.env.AWS_REGION;
} else {
process.env.AWS_REGION = previousAwsRegion;
}
if (previousAwsDefaultRegion === undefined) {
delete process.env.AWS_DEFAULT_REGION;
} else {
process.env.AWS_DEFAULT_REGION = previousAwsDefaultRegion;
}
if (previousAwsSharedCredentials === undefined) {
delete process.env.AWS_SHARED_CREDENTIALS_FILE;
} else {
process.env.AWS_SHARED_CREDENTIALS_FILE = previousAwsSharedCredentials;
}
if (previousAwsConfigFile === undefined) {
delete process.env.AWS_CONFIG_FILE;
} else {
process.env.AWS_CONFIG_FILE = previousAwsConfigFile;
}
if (previousAgentDir === undefined) {
delete process.env.OPENCLAW_AGENT_DIR;
} else {
process.env.OPENCLAW_AGENT_DIR = previousAgentDir;
}
if (previousPiAgentDir === undefined) {
delete process.env.PI_CODING_AGENT_DIR;
} else {
process.env.PI_CODING_AGENT_DIR = previousPiAgentDir;
}
}
});
});
});
it("writes models.json for configured providers", async () => {
await withTempHome(async () => {
await ensureOpenClawModelsJson(MODELS_CONFIG);
await ensureOpenClawModelsJson(CUSTOM_PROXY_MODELS_CONFIG);
const modelPath = path.join(resolveOpenClawAgentDir(), "models.json");
const raw = await fs.readFile(modelPath, "utf8");
@@ -261,6 +51,7 @@ describe("models-config", () => {
expect(parsed.providers["custom-proxy"]?.baseUrl).toBe("http://localhost:4000/v1");
});
});
it("adds minimax provider when MINIMAX_API_KEY is set", async () => {
await withTempHome(async () => {
const prevKey = process.env.MINIMAX_API_KEY;
@@ -294,6 +85,7 @@ describe("models-config", () => {
}
});
});
it("adds synthetic provider when SYNTHETIC_API_KEY is set", async () => {
await withTempHome(async () => {
const prevKey = process.env.SYNTHETIC_API_KEY;

View File

@@ -1,54 +1,16 @@
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import { withTempHome as withTempHomeBase } from "../../test/helpers/temp-home.js";
import { describe, expect, it, vi } from "vitest";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
installModelsConfigTestHooks,
withModelsTempHome as withTempHome,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
async function withTempHome<T>(fn: (home: string) => Promise<T>): Promise<T> {
return withTempHomeBase(fn, { prefix: "openclaw-models-" });
}
const _MODELS_CONFIG: OpenClawConfig = {
models: {
providers: {
"custom-proxy": {
baseUrl: "http://localhost:4000/v1",
apiKey: "TEST_KEY",
api: "openai-completions",
models: [
{
id: "llama-3.1-8b",
name: "Llama 3.1 8B (Proxy)",
api: "openai-completions",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 32000,
},
],
},
},
},
};
installModelsConfigTestHooks({ restoreFetch: true });
describe("models-config", () => {
let previousHome: string | undefined;
const originalFetch = globalThis.fetch;
beforeEach(() => {
previousHome = process.env.HOME;
});
afterEach(() => {
process.env.HOME = previousHome;
if (originalFetch) {
globalThis.fetch = originalFetch;
}
});
it("uses the first github-copilot profile when env tokens are missing", async () => {
await withTempHome(async (home) => {
const previous = process.env.COPILOT_GITHUB_TOKEN;
@@ -153,7 +115,11 @@ describe("models-config", () => {
expect(parsed.providers["github-copilot"]?.baseUrl).toBe("https://copilot.local");
} finally {
process.env.COPILOT_GITHUB_TOKEN = previous;
if (previous === undefined) {
delete process.env.COPILOT_GITHUB_TOKEN;
} else {
process.env.COPILOT_GITHUB_TOKEN = previous;
}
}
});
});