Files
opencommit/test/unit/ollama.test.ts
Keith 2d9a26dc37 feat(ollama): add OCO_OLLAMA_THINK config to control thinking mode
Adds support for passing the `think` param to Ollama's /api/chat endpoint,
allowing users to disable reasoning blocks on models like qwen3.5 via
`oco config set OCO_OLLAMA_THINK=false`.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-07 18:27:38 +08:00

65 lines
1.5 KiB
TypeScript

import { OllamaEngine } from '../../src/engine/ollama';
// Unit tests for OllamaEngine's OCO_OLLAMA_THINK handling: the `think`
// flag must be forwarded in the /api/chat request body when configured,
// and must be absent from the payload when the user has not set it.
describe('OllamaEngine', () => {
  const CHAT_URL = 'http://localhost:11434/api/chat';

  // Derive the config shape from the constructor instead of redeclaring it.
  type EngineConfig = ConstructorParameters<typeof OllamaEngine>[0];

  /**
   * Builds an OllamaEngine with test defaults (plus per-test overrides)
   * and swaps its HTTP client for a jest mock that resolves with a
   * canned commit message. Returns the engine and the mock `post` fn.
   */
  const setup = (overrides: Partial<EngineConfig> = {}) => {
    const engine = new OllamaEngine({
      apiKey: 'ollama',
      model: 'qwen3.5:2b',
      maxTokensOutput: 500,
      maxTokensInput: 4096,
      ...overrides
    });
    const post = jest.fn().mockResolvedValue({
      data: {
        message: {
          content: 'feat: add support for ollama think config'
        }
      }
    });
    // Only `post` is exercised by generateCommitMessage in these tests.
    engine.client = { post } as any;
    return { engine, post };
  };

  /** Drives one generateCommitMessage call with a minimal diff prompt. */
  const runGenerate = (engine: OllamaEngine) =>
    engine.generateCommitMessage([
      { role: 'user', content: 'diff --git a/file b/file' }
    ]);

  it('sends think=false when configured', async () => {
    const { engine, post } = setup({ ollamaThink: false });
    await runGenerate(engine);
    expect(post).toHaveBeenCalledWith(
      CHAT_URL,
      expect.objectContaining({ think: false })
    );
  });

  it('omits think when not configured', async () => {
    const { engine, post } = setup();
    await runGenerate(engine);
    expect(post).toHaveBeenCalledWith(
      CHAT_URL,
      // expect.anything() matches any non-nullish value, so this matcher
      // passes only when `think` is absent (or nullish) in the payload.
      expect.not.objectContaining({ think: expect.anything() })
    );
  });
});