mirror of
https://github.com/di-sukharev/opencommit.git
synced 2026-04-20 03:02:51 -04:00
feat(engine): add DeepSeekEngine (#446)
Add DeepSeekEngine to support DeepSeek API. This includes a new DeepSeekConfig interface and updates to the engine selection logic. feat(README.md, src/commands/config.ts): Add DeepSeek support Adds support for the DeepSeek AI provider. Updates the README, config validation, and model list to include DeepSeek. This allows users to utilize DeepSeek models with the OpenCommit tool. fix(deepseek.ts): update DeepSeek API base URL to include version number v1 refactor(deepseek.ts): improve DeepSeekEngine constructor The DeepSeekEngine constructor is refactored to use the spread syntax for better readability and maintainability when merging config parameters. The baseURL is now explicitly set within the constructor. fix(README.md): remove Groq from the list of supported AI providers refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo Revert "refactor(deepseek.ts): rename interface DeepseekConfig to DeepSeekEngineeekConfig and fix typo" This reverts commit f492367d3885fa97cd685feca889f93d6c465b2f. refactor(deepseek.ts): Rename DeepseekConfig to DeepSeekConfig for consistency ✨ feat(engine): add DeepSeekEngine to support DeepSeek API ♻️ refactor(engine): improve OpenAiEngine and create a new DeepSeekEngine class to handle DeepSeek API requests. The DeepSeekEngine class inherits from OpenAiEngine and overrides the generateCommitMessage method to use the DeepSeek API. This change improves code organization and maintainability. 🐛 Fix: Correct DeepSeekEngine import and class name The import path and class name for DeepSeekEngine were incorrect, causing a runtime error. This commit corrects the import path and class name to `DeepseekEngine` to resolve the issue. Revert "🐛 Fix: Correct DeepSeekEngine import and class name" This reverts commit 738fd36c434d9df9c3a458b1e8230c974bd2a76e. 
🐛 Fix: Correct DeepSeekEngine import and class name The import path and class name for DeepSeekEngine were corrected to match the actual file and class name. This fixes a runtime error. Restore ./out directory to master state
This commit is contained in:
@@ -106,7 +106,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
|
||||
|
||||
```env
|
||||
...
|
||||
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
|
||||
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek>
|
||||
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
|
||||
OCO_API_URL=<may be used to set proxy path to OpenAI api>
|
||||
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
|
||||
|
||||
@@ -128,6 +128,10 @@ export const MODEL_LIST = {
|
||||
'mistral-embed',
|
||||
'mistral-moderation-2411',
|
||||
'mistral-moderation-latest',
|
||||
],
|
||||
deepseek : [
|
||||
'deepseek-chat',
|
||||
'deepseek-reasoner',
|
||||
]
|
||||
};
|
||||
|
||||
@@ -145,6 +149,8 @@ const getDefaultModel = (provider: string | undefined): string => {
|
||||
return MODEL_LIST.groq[0];
|
||||
case 'mistral':
|
||||
return MODEL_LIST.mistral[0];
|
||||
case 'deepseek':
|
||||
return MODEL_LIST.deepseek[0];
|
||||
default:
|
||||
return MODEL_LIST.openai[0];
|
||||
}
|
||||
@@ -184,7 +190,7 @@ export const configValidators = {
|
||||
validateConfig(
|
||||
'OCO_API_KEY',
|
||||
value,
|
||||
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
|
||||
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -307,9 +313,10 @@ export const configValidators = {
|
||||
'azure',
|
||||
'test',
|
||||
'flowise',
|
||||
'groq'
|
||||
'groq',
|
||||
'deepseek'
|
||||
].includes(value) || value.startsWith('ollama'),
|
||||
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
|
||||
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -356,7 +363,8 @@ export enum OCO_AI_PROVIDER_ENUM {
|
||||
FLOWISE = 'flowise',
|
||||
GROQ = 'groq',
|
||||
MISTRAL = 'mistral',
|
||||
MLX = 'mlx'
|
||||
MLX = 'mlx',
|
||||
DEEPSEEK = 'deepseek'
|
||||
}
|
||||
|
||||
export type ConfigType = {
|
||||
|
||||
60
src/engine/deepseek.ts
Normal file
60
src/engine/deepseek.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { OpenAiEngine, OpenAiConfig } from './openAI';
|
||||
|
||||
/**
 * Configuration for the DeepSeek engine. DeepSeek exposes an
 * OpenAI-compatible API, so no fields beyond OpenAiConfig are needed;
 * the base URL is pinned inside DeepseekEngine's constructor.
 */
export interface DeepseekConfig extends OpenAiConfig {}
|
||||
|
||||
export class DeepseekEngine extends OpenAiEngine {
|
||||
constructor(config: DeepseekConfig) {
|
||||
// Call OpenAIEngine constructor with forced Deepseek baseURL
|
||||
super({
|
||||
...config,
|
||||
baseURL: 'https://api.deepseek.com/v1'
|
||||
});
|
||||
}
|
||||
|
||||
// Identical method from OpenAiEngine, re-implemented here
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | null> => {
|
||||
const params = {
|
||||
model: this.config.model,
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
};
|
||||
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (
|
||||
REQUEST_TOKENS >
|
||||
this.config.maxTokensInput - this.config.maxTokensOutput
|
||||
)
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
|
||||
const completion = await this.client.chat.completions.create(params);
|
||||
|
||||
const message = completion.choices[0].message;
|
||||
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError) throw new Error(openAiError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -10,6 +10,7 @@ import { MistralAiEngine } from '../engine/mistral';
|
||||
import { TestAi, TestMockType } from '../engine/testAi';
|
||||
import { GroqEngine } from '../engine/groq';
|
||||
import { MLXEngine } from '../engine/mlx';
|
||||
import { DeepseekEngine } from '../engine/deepseek';
|
||||
|
||||
export function getEngine(): AiEngine {
|
||||
const config = getConfig();
|
||||
@@ -51,6 +52,9 @@ export function getEngine(): AiEngine {
|
||||
case OCO_AI_PROVIDER_ENUM.MLX:
|
||||
return new MLXEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
return new DeepseekEngine(DEFAULT_CONFIG);
|
||||
|
||||
default:
|
||||
return new OpenAiEngine(DEFAULT_CONFIG);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user