improve OCO_AI_PROVIDER=ollama[/model name] (#327)

* 🐛 (config.ts, engine/ollama.ts, utils/engine.ts): improve Ollama AI configuration and usage
 (config.ts): add support for multiple Ollama models and allow users to specify the model in their config
 (engine/ollama.ts, utils/engine.ts): refactor the code to use the Ollama model specified in the config instead of a hardcoded one

* add build results
This commit is contained in:
tumf
2024-05-07 14:51:24 +07:00
committed by GitHub
parent 058bad95cd
commit 74024a4997
5 changed files with 4302 additions and 43880 deletions

View File

@@ -7,13 +7,18 @@ import { testAi } from '../engine/testAi';
export function getEngine(): AiEngine {
const config = getConfig();
if (config?.OCO_AI_PROVIDER == 'ollama') {
const provider = config?.OCO_AI_PROVIDER;
if (provider?.startsWith('ollama')) {
const model = provider.split('/')[1];
if (model) {
ollamaAi.setModel(model);
}
return ollamaAi;
} else if (config?.OCO_AI_PROVIDER == 'anthropic') {
return anthropicAi;
} else if (config?.OCO_AI_PROVIDER == 'test') {
return testAi;
}
//open ai gpt by default
// open ai gpt by default
return api;
}