test(config): cover maxTokens clamping

This commit is contained in:
George Pickett
2026-02-08 14:04:58 -08:00
parent 5918affe49
commit b49b8380a5
2 changed files with 19 additions and 1 deletion

View File

@@ -215,7 +215,6 @@ export function applyModelDefaults(cfg: OpenClawConfig): OpenClawConfig {
}
const defaultMaxTokens = Math.min(DEFAULT_MODEL_MAX_TOKENS, contextWindow);
// Clamp maxTokens to contextWindow to prevent invalid configurations
const rawMaxTokens = isPositiveNumber(raw.maxTokens) ? raw.maxTokens : defaultMaxTokens;
const maxTokens = Math.min(rawMaxTokens, contextWindow);
if (raw.maxTokens !== maxTokens) {

View File

@@ -80,4 +80,23 @@ describe("applyModelDefaults", () => {
expect(model?.contextWindow).toBe(DEFAULT_CONTEXT_TOKENS);
expect(model?.maxTokens).toBe(8192);
});
it("clamps maxTokens to contextWindow", () => {
  // A model configured with maxTokens (40960) larger than its contextWindow (32768).
  const input = {
    models: {
      providers: {
        myproxy: {
          api: "openai-completions",
          models: [
            {
              id: "gpt-5.2",
              name: "GPT-5.2",
              contextWindow: 32768,
              maxTokens: 40960,
            },
          ],
        },
      },
    },
  } satisfies OpenClawConfig;
  const result = applyModelDefaults(input);
  const normalized = result.models?.providers?.myproxy?.models?.[0];
  // contextWindow passes through unchanged; maxTokens is clamped down to it.
  expect(normalized?.contextWindow).toBe(32768);
  expect(normalized?.maxTokens).toBe(32768);
});
});