This commit is contained in:
di-sukharev
2025-08-01 16:13:54 +03:00
parent 22077399fd
commit c904a78cd9
2 changed files with 724 additions and 314 deletions

File diff suppressed because it is too large. Load Diff

View File

@@ -70705,10 +70705,10 @@ var ja_default = {
// Korean (ko) locale strings used to show example commit messages in the
// user's language. Keys mirror the other locale objects (e.g. ja_default):
// a "fix" example, a "feat" example, a description, and scope-less variants.
// Note: the commit-type prefixes ("fix:", "feat:") stay in English so the
// examples remain valid Conventional Commits; only the summary is localized.
var ko_default = {
  localLanguage: "\uD55C\uAD6D\uC5B4",
  commitFix: "fix(server.ts): \uD3EC\uD2B8 \uBCC0\uC218\uB97C \uC18C\uBB38\uC790 port\uC5D0\uC11C \uB300\uBB38\uC790 PORT\uB85C \uBCC0\uACBD",
  commitFeat: "feat(server.ts): process.env.PORT \uD658\uACBD \uBCC0\uC218 \uC9C0\uC6D0 \uCD94\uAC00",
  commitDescription: "\uD3EC\uD2B8 \uBCC0\uC218\uB294 \uC774\uC81C PORT\uB85C \uC774\uB984\uC774 \uC9C0\uC815\uB418\uC5B4 \uC0C1\uC218\uC778 PORT\uC640 \uC77C\uAD00\uC131 \uC788\uB294 \uC774\uB984 \uADDC\uCE59\uC744 \uB530\uB985\uB2C8\uB2E4. \uD658\uACBD \uBCC0\uC218 \uC9C0\uC6D0\uC744 \uD1B5\uD574 \uC560\uD50C\uB9AC\uCF00\uC774\uC158\uC740 \uC774\uC81C process.env.PORT \uD658\uACBD \uBCC0\uC218\uB85C \uC9C0\uC815\uB41C \uC0AC\uC6A9 \uAC00\uB2A5\uD55C \uBAA8\uB4E0 \uD3EC\uD2B8\uC5D0\uC11C \uC2E4\uD589\uD560 \uC218 \uC788\uC73C\uBBC0\uB85C \uB354 \uC720\uC5F0\uD574\uC84C\uC2B5\uB2C8\uB2E4.",
  commitFixOmitScope: "fix: \uD3EC\uD2B8 \uBCC0\uC218\uB97C \uC18C\uBB38\uC790 port\uC5D0\uC11C \uB300\uBB38\uC790 PORT\uB85C \uBCC0\uACBD",
  commitFeatOmitScope: "feat: process.env.PORT \uD658\uACBD \uBCC0\uC218 \uC9C0\uC6D0 \uCD94\uAC00"
};
// src/i18n/nl.json
@@ -70883,6 +70883,7 @@ var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_API_CUSTOM_HEADERS"] = "OCO_API_CUSTOM_HEADERS";
CONFIG_KEYS2["OCO_OMIT_SCOPE"] = "OCO_OMIT_SCOPE";
CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
CONFIG_KEYS2["OCO_HOOK_AUTO_UNCOMMENT"] = "OCO_HOOK_AUTO_UNCOMMENT";
return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
var MODEL_LIST = {
@@ -70985,6 +70986,110 @@ var MODEL_LIST = {
"mistral-moderation-latest"
],
deepseek: ["deepseek-chat", "deepseek-reasoner"],
// AI/ML API available chat-completion models
// https://api.aimlapi.com/v1/models
aimlapi: [
"openai/gpt-4o",
"gpt-4o-2024-08-06",
"gpt-4o-2024-05-13",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"chatgpt-4o-latest",
"gpt-4-turbo",
"gpt-4-turbo-2024-04-09",
"gpt-4",
"gpt-4-0125-preview",
"gpt-4-1106-preview",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo-1106",
"o1-preview",
"o1-preview-2024-09-12",
"o1-mini",
"o1-mini-2024-09-12",
"o3-mini",
"gpt-4o-audio-preview",
"gpt-4o-mini-audio-preview",
"gpt-4o-search-preview",
"gpt-4o-mini-search-preview",
"openai/gpt-4.1-2025-04-14",
"openai/gpt-4.1-mini-2025-04-14",
"openai/gpt-4.1-nano-2025-04-14",
"openai/o4-mini-2025-04-16",
"openai/o3-2025-04-16",
"o1",
"openai/o3-pro",
"meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo",
"google/gemma-2-27b-it",
"meta-llama/Llama-Vision-Free",
"Qwen/Qwen2-72B-Instruct",
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"nvidia/Llama-3.1-Nemotron-70B-Instruct-HF",
"NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
"meta-llama/Llama-3.3-70B-Instruct-Turbo",
"meta-llama/Llama-3.2-3B-Instruct-Turbo",
"meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo",
"meta-llama/Llama-Guard-3-11B-Vision-Turbo",
"Qwen/Qwen2.5-7B-Instruct-Turbo",
"Qwen/Qwen2.5-Coder-32B-Instruct",
"meta-llama/Meta-Llama-3-8B-Instruct-Lite",
"meta-llama/Llama-3-8b-chat-hf",
"meta-llama/Llama-3-70b-chat-hf",
"Qwen/Qwen2.5-72B-Instruct-Turbo",
"Qwen/QwQ-32B",
"meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
"mistralai/Mistral-7B-Instruct-v0.2",
"meta-llama/LlamaGuard-2-8b",
"mistralai/Mistral-7B-Instruct-v0.1",
"mistralai/Mistral-7B-Instruct-v0.3",
"meta-llama/Meta-Llama-Guard-3-8B",
"meta-llama/llama-4-scout",
"meta-llama/llama-4-maverick",
"Qwen/Qwen3-235B-A22B-fp8-tput",
"claude-3-opus-20240229",
"claude-3-haiku-20240307",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
"claude-3-5-haiku-20241022",
"claude-3-7-sonnet-20250219",
"claude-sonnet-4-20250514",
"claude-opus-4-20250514",
"google/gemini-2.0-flash-exp",
"google/gemini-2.0-flash",
"google/gemini-2.5-pro",
"google/gemini-2.5-flash",
"deepseek-chat",
"deepseek-reasoner",
"qwen-max",
"qwen-plus",
"qwen-turbo",
"qwen-max-2025-01-25",
"mistralai/mistral-tiny",
"mistralai/mistral-nemo",
"anthracite-org/magnum-v4-72b",
"nvidia/llama-3.1-nemotron-70b-instruct",
"cohere/command-r-plus",
"mistralai/codestral-2501",
"google/gemma-3-4b-it",
"google/gemma-3-12b-it",
"google/gemma-3-27b-it",
"google/gemini-2.5-flash-lite-preview",
"deepseek/deepseek-prover-v2",
"google/gemma-3n-e4b-it",
"cohere/command-a",
"MiniMax-Text-01",
"abab6.5s-chat",
"minimax/m1",
"bagoodex/bagoodex-search-v1",
"moonshot/kimi-k2-preview",
"perplexity/sonar",
"perplexity/sonar-pro",
"x-ai/grok-4-07-09",
"x-ai/grok-3-beta",
"x-ai/grok-3-mini-beta"
],
// OpenRouter available models
// input_modalities: 'text'
// output_modalities: 'text'
@@ -71333,6 +71438,8 @@ var getDefaultModel = (provider) => {
return MODEL_LIST.mistral[0];
case "deepseek":
return MODEL_LIST.deepseek[0];
case "aimlapi":
return MODEL_LIST.aimlapi[0];
case "openrouter":
return MODEL_LIST.openrouter[0];
default:
@@ -71489,9 +71596,10 @@ var configValidators = {
"flowise",
"groq",
"deepseek",
"aimlapi",
"openrouter"
].includes(value) || value.startsWith("ollama"),
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
);
return value;
},
@@ -71520,6 +71628,13 @@ var configValidators = {
"Must be true or false"
);
return value;
},
["OCO_HOOK_AUTO_UNCOMMENT" /* OCO_HOOK_AUTO_UNCOMMENT */](value) {
validateConfig(
"OCO_HOOK_AUTO_UNCOMMENT" /* OCO_HOOK_AUTO_UNCOMMENT */,
typeof value === "boolean",
"Must be true or false"
);
}
};
var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
@@ -71534,6 +71649,7 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
OCO_AI_PROVIDER_ENUM2["MISTRAL"] = "mistral";
OCO_AI_PROVIDER_ENUM2["MLX"] = "mlx";
OCO_AI_PROVIDER_ENUM2["DEEPSEEK"] = "deepseek";
OCO_AI_PROVIDER_ENUM2["AIMLAPI"] = "aimlapi";
OCO_AI_PROVIDER_ENUM2["OPENROUTER"] = "openrouter";
return OCO_AI_PROVIDER_ENUM2;
})(OCO_AI_PROVIDER_ENUM || {});
@@ -71558,8 +71674,9 @@ var DEFAULT_CONFIG = {
OCO_TEST_MOCK_TYPE: "commit-message",
OCO_WHY: false,
OCO_OMIT_SCOPE: false,
OCO_GITPUSH: true
OCO_GITPUSH: true,
// todo: deprecate
OCO_HOOK_AUTO_UNCOMMENT: false
};
var initGlobalConfig = (configPath = defaultConfigPath) => {
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(DEFAULT_CONFIG), "utf8");
@@ -71760,6 +71877,11 @@ function getConfigKeyDetails(key) {
description: "Message template placeholder",
values: ["String (must start with $)"]
};
case "OCO_HOOK_AUTO_UNCOMMENT" /* OCO_HOOK_AUTO_UNCOMMENT */:
return {
description: "Automatically uncomment the commit message in the hook",
values: ["true", "false"]
};
default:
return {
description: "String value",
@@ -87032,8 +87154,7 @@ var MistralAiEngine = class {
if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput)
throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
const completion = await this.client.chat.complete(params);
if (!completion.choices)
throw Error("No completion choice available.");
if (!completion.choices) throw Error("No completion choice available.");
const message = completion.choices[0].message;
if (!message || !message.content)
throw Error("No completion choice available.");
@@ -87052,7 +87173,10 @@ var MistralAiEngine = class {
if (!config6.baseURL) {
this.client = new Mistral({ apiKey: config6.apiKey });
} else {
this.client = new Mistral({ apiKey: config6.apiKey, serverURL: config6.baseURL });
this.client = new Mistral({
apiKey: config6.apiKey,
serverURL: config6.baseURL
});
}
}
};
@@ -87134,6 +87258,40 @@ var DeepseekEngine = class extends OpenAiEngine {
}
};
// src/engine/aimlapi.ts
// Engine for the AI/ML API provider (https://api.aimlapi.com). It speaks the
// OpenAI-compatible chat-completions protocol over a preconfigured axios client.
var AimlApiEngine = class {
  constructor(config6) {
    this.config = config6;
    // Request a commit message for the given chat messages.
    // Returns the first choice's text content, or null when the response
    // carries no content. A 401 with a provider error payload is surfaced
    // as an Error with the provider's message; anything else is rethrown.
    this.generateCommitMessage = async (messages) => {
      try {
        const payload = { model: this.config.model, messages };
        const response = await this.client.post("", payload);
        const firstChoice = response.data.choices?.[0];
        return firstChoice?.message?.content ?? null;
      } catch (error) {
        const isUnauthorized =
          axios_default.isAxiosError(error) && error.response?.status === 401;
        if (isUnauthorized) {
          const apiError = error.response.data.error;
          if (apiError) throw new Error(apiError.message);
        }
        throw error;
      }
    };
    // Build the HTTP client once; every request posts to the baseURL itself
    // (path ""), so the default baseURL already includes the full endpoint.
    const requestHeaders = {
      Authorization: `Bearer ${config6.apiKey}`,
      "HTTP-Referer": "https://github.com/di-sukharev/opencommit",
      "X-Title": "opencommit",
      "Content-Type": "application/json",
      ...config6.customHeaders
    };
    this.client = axios_default.create({
      baseURL: config6.baseURL || "https://api.aimlapi.com/v1/chat/completions",
      headers: requestHeaders
    });
  }
};
// src/engine/openrouter.ts
var OpenRouterEngine = class {
constructor(config6) {
@@ -87220,6 +87378,8 @@ function getEngine() {
return new MLXEngine(DEFAULT_CONFIG2);
case "deepseek" /* DEEPSEEK */:
return new DeepseekEngine(DEFAULT_CONFIG2);
case "aimlapi" /* AIMLAPI */:
return new AimlApiEngine(DEFAULT_CONFIG2);
case "openrouter" /* OPENROUTER */:
return new OpenRouterEngine(DEFAULT_CONFIG2);
default:
@@ -87733,7 +87893,10 @@ var config5 = getConfig();
var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT;
var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT;
var generateCommitMessageChatCompletionPrompt = async (diff, fullGitMojiSpec, context2) => {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec, context2);
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
fullGitMojiSpec,
context2
);
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
chatContextAsCompletionRequest.push({
role: "user",