improve OCO_AI_PROVIDER=ollama[/model name] (#327)

* 🐛 (config.ts, engine/ollama.ts, utils/engine.ts): improve Ollama AI configuration and usage
  (config.ts): add support for multiple Ollama models and allow users to specify the model in their config (example below)
  (engine/ollama.ts, utils/engine.ts): refactor the code to use the configured Ollama model instead of hardcoding it

* add build results
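
For illustration, the bracketed [/model name] in the title means the provider value can now carry a model after a slash. Assuming the usual opencommit config flow (the command form and the llama3 model name here are placeholders, not part of this commit), a setting such as

    OCO_AI_PROVIDER=ollama/llama3

in the opencommit config file (or via something like "oco config set OCO_AI_PROVIDER=ollama/llama3") would select llama3, while a plain OCO_AI_PROVIDER=ollama keeps the default mistral.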
This commit is contained in:
tumf
2024-05-07 14:51:24 +07:00
committed by GitHub
parent 058bad95cd
commit 74024a4997
5 changed files with 4302 additions and 43880 deletions


@@ -3,10 +3,15 @@ import { ChatCompletionRequestMessage } from 'openai';
 import { AiEngine } from './Engine';

 export class OllamaAi implements AiEngine {
+  private model = "mistral"; // as default model of Ollama
+
+  setModel(model: string) {
+    this.model = model;
+  }
   async generateCommitMessage(
     messages: Array<ChatCompletionRequestMessage>
   ): Promise<string | undefined> {
-    const model = 'mistral'; // todo: allow other models
+    const model = this.model;

     //console.log(messages);
     //process.exit()
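
For context, here is a minimal sketch of how utils/engine.ts could route the configured model into this class via the new setModel(); the helper name getOllamaEngine and the handling of the provider string are assumptions for illustration, not the commit's actual implementation:

import { OllamaAi } from '../engine/ollama';

// OCO_AI_PROVIDER may now be "ollama" or "ollama/<model>", e.g. "ollama/llama3".
export const getOllamaEngine = (aiProvider: string): OllamaAi => {
  const engine = new OllamaAi();
  const [, model] = aiProvider.split('/');
  if (model) engine.setModel(model); // otherwise the "mistral" default applies
  return engine;
};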