Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-04-20 03:02:51 -04:00)
feat(ollama): add OCO_OLLAMA_THINK config to control thinking mode

Adds support for passing the `think` param to Ollama's /api/chat endpoint, allowing users to disable reasoning blocks on models like qwen3.5 via `oco config set OCO_OLLAMA_THINK=false`.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
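For orientation, this is the request-body shape the change produces. A minimal sketch in TypeScript, assuming Ollama's documented /api/chat fields; the model name is hypothetical:

// Hypothetical body for POST /api/chat after `oco config set
// OCO_OLLAMA_THINK=false`. When the option is unset, the `think` field is
// omitted entirely and Ollama keeps its default behavior.
const requestBody = {
  model: 'qwen3',                               // hypothetical model name
  messages: [{ role: 'user', content: '...' }], // the generated prompt
  options: { temperature: 0, top_p: 0.1 },
  stream: false,
  think: false                                  // suppresses the reasoning block
};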
@@ -29,7 +29,8 @@ export enum CONFIG_KEYS {
   OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
   OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
   OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
-  OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT'
+  OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT',
+  OCO_OLLAMA_THINK = 'OCO_OLLAMA_THINK'
 }
 
 export enum CONFIG_MODES {
@@ -838,6 +839,15 @@ export const configValidators = {
       typeof value === 'boolean',
       'Must be true or false'
     );
   },
+
+  [CONFIG_KEYS.OCO_OLLAMA_THINK](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_OLLAMA_THINK,
+      typeof value === 'boolean',
+      'Must be true or false'
+    );
+    return value;
+  }
 };
 
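`validateConfig` itself is not shown in this diff; a minimal sketch of the contract the call sites imply, with a hypothetical implementation:

// Hypothetical stand-in for opencommit's validateConfig helper; the real
// one may differ. The call sites imply (key, condition, message), failing
// loudly when the condition is false.
function validateConfig(key: string, condition: boolean, message: string): void {
  if (!condition) {
    throw new Error(`Invalid value for ${key}: ${message}`);
  }
}

// Note: typeof 'false' === 'boolean' is false, so the CLI layer must coerce
// the string from `oco config set OCO_OLLAMA_THINK=false` into a real
// boolean before this validator accepts it.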
@@ -905,6 +915,7 @@ export type ConfigType = {
   [CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
   [CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
   [CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
+  [CONFIG_KEYS.OCO_OLLAMA_THINK]?: boolean;
 };
 
 export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
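The `?` on the new key matters: existing configs without it still type-check, and reads yield `boolean | undefined`. A small sketch of the consequence (the `config` instance here is hypothetical):

declare const config: ConfigType; // hypothetical instance, for illustration
const think = config[CONFIG_KEYS.OCO_OLLAMA_THINK]; // type: boolean | undefined
// The engine's `typeof ... === 'boolean'` guard (later hunk) maps undefined
// to "omit the field" rather than sending think: undefined to Ollama.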
@@ -13,6 +13,7 @@ export interface AiEngineConfig {
   baseURL?: string;
   proxy?: string;
   customHeaders?: Record<string, string>;
+  ollamaThink?: boolean;
 }
 
 type Client =
@@ -4,7 +4,9 @@ import { normalizeEngineError } from '../utils/engineErrorHandler';
 import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';
 
-interface OllamaConfig extends AiEngineConfig {}
+interface OllamaConfig extends AiEngineConfig {
+  ollamaThink?: boolean;
+}
 
 const DEFAULT_OLLAMA_URL = 'http://localhost:11434';
 const OLLAMA_CHAT_PATH = '/api/chat';
@@ -32,12 +34,15 @@ export class OllamaEngine implements AiEngine {
   async generateCommitMessage(
     messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
   ): Promise<string | undefined> {
-    const params = {
+    const params: Record<string, any> = {
       model: this.config.model ?? 'mistral',
       messages,
       options: { temperature: 0, top_p: 0.1 },
       stream: false
     };
+    if (typeof this.config.ollamaThink === 'boolean') {
+      params.think = this.config.ollamaThink;
+    }
     try {
       const response = await this.client.post(this.chatUrl, params);
 
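The flag's effect can be reproduced against a local Ollama outside opencommit. A sketch assuming Node 18+ (global `fetch`), an Ollama version that supports `think`, and a hypothetical thinking-capable model:

// Standalone repro of the request the engine builds above.
const res = await fetch('http://localhost:11434/api/chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    model: 'qwen3', // hypothetical model name
    messages: [{ role: 'user', content: 'Write a one-line commit message.' }],
    stream: false,
    think: false
  })
});
const data = await res.json();
console.log(data.message.content); // with think: false, no reasoning block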
@@ -54,7 +54,10 @@ export function getEngine(): AiEngine {
 
   switch (provider) {
     case OCO_AI_PROVIDER_ENUM.OLLAMA:
-      return new OllamaEngine(DEFAULT_CONFIG);
+      return new OllamaEngine({
+        ...DEFAULT_CONFIG,
+        ollamaThink: config.OCO_OLLAMA_THINK
+      });
 
     case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
       return new AnthropicEngine(DEFAULT_CONFIG);
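End to end, `oco config set OCO_OLLAMA_THINK=false` should now flow from the config file through `getEngine` into every /api/chat request, and removing the key restores the previous behavior, since `think` is only attached when the stored value is a real boolean.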