Compare commits

..

12 Commits

Author SHA1 Message Date
di-sukharev
88d6a92567 fix gemini test 2024-09-06 12:10:21 +03:00
di-sukharev
36aab60393 Merge remote-tracking branch 'origin/dev' into refactoring_v2 2024-09-06 12:07:25 +03:00
di-sukharev
ba3c0b7e0b refactor(config.ts): remove unused setDefaultConfigValues function to clean up code
feat(config.ts): create getIsGlobalConfigFileExist function to check for config file existence
feat(migrations): add migration to set missing default values for configuration
fix(migrations): update migration functions to improve clarity and functionality
chore(migrations): register new migration for setting missing default values
style(migrations): format code for better readability in migration files
test(config.test.ts): update tests to improve readability and maintainability
2024-09-06 12:05:28 +03:00
di-sukharev
5209610236 fix(config.ts): ensure setConfig is only called when there are entries to set to prevent unnecessary function calls
feat(run.ts): add conditional to skip migrations if OCO_AI_PROVIDER is set to TEST to improve migration handling during testing
2024-09-04 11:54:54 +03:00
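Neither change in this commit is visible in the file diffs below, so as a rough illustration only: a minimal sketch of what the TEST-provider guard in the migration runner could look like, assuming the getConfig and OCO_AI_PROVIDER_ENUM exports that appear later in this compare.

// Hypothetical sketch, not the actual src/migrations runner code.
import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';

export const runMigrations = async (): Promise<void> => {
  const config = getConfig();

  // Assumption: the test suite sets OCO_AI_PROVIDER to "test",
  // so persisting migration state would only pollute the test environment.
  if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;

  // ...load and apply pending migrations here...
};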
di-sukharev
d1f03f1105 feat(config): rename OCO_OPENAI_API_KEY to OCO_API_KEY for consistency and clarity
fix(config): add validation for OCO_WHY configuration key to ensure it is a boolean
refactor(config): extract default config setting logic into setDefaultConfigValues function
fix(cli): reorder function calls to ensure checkIsLatestVersion runs after runMigrations
chore(migrations): update getConfig calls to disable caching and default value setting
chore(migrations): remove obsolete configuration keys from global config file
fix(migrations): improve migration logging with consistent output formatting
style(migrations): enhance migration success and failure messages for better user feedback
2024-09-04 11:46:43 +03:00
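The OCO_WHY validator itself is not part of the diffs below; a self-contained sketch of the boolean check it presumably performs, loosely following the validateConfig(key, condition, message) pattern visible in src/commands/config.ts further down (the local validateConfig and the error message here are assumptions).

// Hypothetical, self-contained sketch; the real entry lives inside configValidators.
const validateConfig = (key: string, condition: boolean, message: string) => {
  if (!condition) throw new Error(`Unsupported config value for ${key}: ${message}`);
};

const validateOcoWhy = (value: unknown): boolean => {
  validateConfig('OCO_WHY', typeof value === 'boolean', 'Must be true or false');
  return value as boolean;
};

validateOcoWhy(false); // ok
// validateOcoWhy('yes'); // would throw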
di-sukharev
3df5b241dc fix(config.ts): set default values for OCO_TOKENS_MAX_INPUT and OCO_TOKENS_MAX_OUTPUT to ensure proper configuration
refactor(generateCommitMessageFromGitDiff.ts): simplify MAX_TOKENS_INPUT and MAX_TOKENS_OUTPUT assignments by removing redundant default value logic
2024-09-04 11:04:44 +03:00
di-sukharev
5ddf2cb21a Merge remote-tracking branch 'origin/dev' into refactoring_v2 2024-09-04 10:56:34 +03:00
di-sukharev
c6b6b2f3fd chore(package.json): remove unused dependency "ai" to clean up package.json
fix(commit.ts): update success message to start with a lowercase letter for consistency
2024-09-03 13:13:38 +03:00
di-sukharev
e1ce774538 chore(config): remove debug console logs to clean up the codebase
feat(migrations): add migration to remove obsolete config keys from global file to streamline configuration management
2024-09-02 12:29:41 +03:00
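The "remove obsolete config keys" migration is also not shown below; a rough, hypothetical sketch of its shape. Only getGlobalConfig appears in this compare; the key list and the setGlobalConfig-style writer are assumptions.

// Hypothetical sketch only; the real migration lives under src/migrations/.
import { getGlobalConfig, setGlobalConfig } from '../commands/config'; // setGlobalConfig is assumed

const OBSOLETE_KEYS = ['OCO_OPENAI_BASE_PATH']; // assumed example of a retired key

export default function () {
  const config = getGlobalConfig() as Record<string, unknown>;
  for (const key of OBSOLETE_KEYS) {
    delete config[key]; // drop keys the current version no longer reads
  }
  setGlobalConfig(config); // write the cleaned object back to the global config file
}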
di-sukharev
f1ea54cf1c feat(docs): update README to reflect changes in API key and URL configuration for better clarity and usability
feat(migrations): implement migration to consolidate API key and URL into a single configuration for improved simplicity
refactor(config): rename configuration keys to use a single API key and URL for all LLM providers, enhancing consistency
fix(engine): update engine initialization to use new unified API key and URL configuration
test(config): update tests to validate new configuration structure and ensure backward compatibility with existing setups
2024-09-02 12:25:49 +03:00
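The consolidation migration itself is not in the diffs below either; a hedged sketch of the idea — copy whichever provider-specific key is already set into the unified entry — using getGlobalConfig and setConfig, which do appear in this compare. Every key name other than OCO_API_KEY and OCO_OPENAI_API_KEY is an assumption based on the commit message.

// Hypothetical sketch, not the shipped migration.
import { getGlobalConfig, setConfig } from '../commands/config';

export default function () {
  const config = getGlobalConfig() as Record<string, string | undefined>;
  const entriesToSet: [string, string][] = [];

  // Prefer an already-set unified key; otherwise fall back to the legacy OpenAI key.
  if (!config.OCO_API_KEY && config.OCO_OPENAI_API_KEY) {
    entriesToSet.push(['OCO_API_KEY', config.OCO_OPENAI_API_KEY]);
  }
  // Same idea for the base URL (both key names assumed).
  if (!config.OCO_API_URL && config.OCO_OPENAI_BASE_PATH) {
    entriesToSet.push(['OCO_API_URL', config.OCO_OPENAI_BASE_PATH]);
  }

  if (entriesToSet.length > 0) setConfig(entriesToSet);
}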
di-sukharev
ccb7cd99e5 Merge branch 'dev' into refactoring_v2 2024-09-02 11:13:57 +03:00
di-sukharev
ce6ae0b514 refactor(engine): rename classes from FlowiseAi, Gemini, and OllamaAi to FlowiseEngine, GeminiEngine, and OllamaEngine for consistency and clarity
fix(engine): update imports and instantiation of renamed classes in engine utility functions to ensure proper functionality
2024-09-02 11:13:49 +03:00
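The renamed classes surface in the engine factory (see the last diff in this compare); a compressed sketch of that factory under the new names. The flowise import path, the GEMINI case, and the engine config fields are assumptions here.

// Sketch of the getEngine factory after the rename; not a verbatim copy of src/utils/engine.ts.
import { FlowiseEngine } from '../engine/flowise'; // path assumed
import { GeminiEngine } from '../engine/gemini';
import { OpenAiEngine } from '../engine/openAi';
import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';

export function getEngine() {
  const config = getConfig();
  const engineConfig = { apiKey: config.OCO_API_KEY, model: config.OCO_MODEL }; // fields assumed

  switch (config.OCO_AI_PROVIDER) {
    case OCO_AI_PROVIDER_ENUM.FLOWISE:
      return new FlowiseEngine(engineConfig);
    case OCO_AI_PROVIDER_ENUM.GEMINI:
      return new GeminiEngine(engineConfig);
    default:
      return new OpenAiEngine(engineConfig);
  }
}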
11 changed files with 33 additions and 208 deletions

View File

@@ -27331,7 +27331,7 @@ function G3(t2, e3) {
 // package.json
 var package_default = {
   name: "opencommit",
-  version: "3.2.2",
+  version: "3.1.2",
   description: "Auto-generate impressive commits in 1 second. Killing lame commits with AI \u{1F92F}\u{1F52B}",
   keywords: [
     "git",
@@ -27377,9 +27377,8 @@ var package_default = {
   "dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
   build: "rimraf out && node esbuild.config.js",
   "build:push": "npm run build && git add . && git commit -m 'build' && git push",
-  deploy: "npm publish --tag latest",
-  "deploy:build": "npm run build:push && git push --tags && npm run deploy",
-  "deploy:patch": "npm version patch && npm run deploy:build",
+  deploy: "npm run build:push && git push --tags && npm publish --tag latest",
+  "deploy:patch": "npm version patch && npm run deploy",
   lint: "eslint src --ext ts && tsc --noEmit",
   format: "prettier --write src",
   test: "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
@@ -29918,15 +29917,6 @@ var MODEL_LIST = {
     "gemini-1.0-pro",
     "gemini-pro-vision",
     "text-embedding-004"
-  ],
-  groq: [
-    "llama3-70b-8192",
-    "llama3-8b-8192",
-    "llama-guard-3-8b",
-    "llama-3.1-8b-instant",
-    "llama-3.1-70b-versatile",
-    "gemma-7b-it",
-    "gemma2-9b-it"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -29937,8 +29927,6 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.anthropic[0];
     case "gemini":
       return MODEL_LIST.gemini[0];
-    case "groq":
-      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -30062,15 +30050,9 @@ var configValidators = {
       value = "openai";
     validateConfig(
       "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-      [
-        "openai",
-        "anthropic",
-        "gemini",
-        "azure",
-        "test",
-        "flowise",
-        "groq"
-      ].includes(value) || value.startsWith("ollama"),
+      ["openai", "anthropic", "gemini", "azure", "test", "flowise"].includes(
+        value
+      ) || value.startsWith("ollama"),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
     );
     return value;
@@ -30110,7 +30092,6 @@ var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
   OCO_AI_PROVIDER_ENUM2["AZURE"] = "azure";
   OCO_AI_PROVIDER_ENUM2["TEST"] = "test";
   OCO_AI_PROVIDER_ENUM2["FLOWISE"] = "flowise";
-  OCO_AI_PROVIDER_ENUM2["GROQ"] = "groq";
   return OCO_AI_PROVIDER_ENUM2;
 })(OCO_AI_PROVIDER_ENUM || {});
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -30127,6 +30108,7 @@ var DEFAULT_CONFIG = {
   OCO_AI_PROVIDER: "openai" /* OPENAI */,
   OCO_ONE_LINE_COMMIT: false,
   OCO_TEST_MOCK_TYPE: "commit-message",
+  OCO_FLOWISE_ENDPOINT: ":",
   OCO_WHY: false,
   OCO_GITPUSH: true
 };
@@ -30186,25 +30168,6 @@ var mergeConfigs = (main, fallback) => {
     return acc;
   }, {});
 };
-var cleanUndefinedValues = (config7) => {
-  return Object.fromEntries(
-    Object.entries(config7).map(([_7, v5]) => {
-      try {
-        if (typeof v5 === "string") {
-          if (v5 === "undefined")
-            return [_7, void 0];
-          if (v5 === "null")
-            return [_7, null];
-          const parsedValue = JSON.parse(v5);
-          return [_7, parsedValue];
-        }
-        return [_7, v5];
-      } catch (error) {
-        return [_7, v5];
-      }
-    })
-  );
-};
 var getConfig = ({
   envPath = defaultEnvPath,
   globalPath = defaultConfigPath
@@ -30212,8 +30175,7 @@ var getConfig = ({
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);
   const config7 = mergeConfigs(envConfig, globalConfig);
-  const cleanConfig = cleanUndefinedValues(config7);
-  return cleanConfig;
+  return config7;
 };
 var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
   const config7 = getConfig({
@@ -44508,19 +44470,7 @@ var OpenAiEngine = class {
       }
     };
     this.config = config7;
-    if (!config7.baseURL) {
-      this.client = new OpenAI({ apiKey: config7.apiKey });
-    } else {
-      this.client = new OpenAI({ apiKey: config7.apiKey, baseURL: config7.baseURL });
-    }
-  }
-};
-// src/engine/groq.ts
-var GroqEngine = class extends OpenAiEngine {
-  constructor(config7) {
-    config7.baseURL = "https://api.groq.com/openai/v1";
-    super(config7);
+    this.client = new OpenAI({ apiKey: config7.apiKey });
   }
 };
@@ -44548,8 +44498,6 @@ function getEngine() {
       return new AzureEngine(DEFAULT_CONFIG2);
     case "flowise" /* FLOWISE */:
       return new FlowiseEngine(DEFAULT_CONFIG2);
-    case "groq" /* GROQ */:
-      return new GroqEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }
@@ -45329,15 +45277,13 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
   );
   ce(stdout);
   const remotes = await getGitRemotes();
-  if (config6.OCO_GITPUSH === false)
-    return;
   if (!remotes.length) {
     const { stdout: stdout2 } = await execa("git", ["push"]);
     if (stdout2)
       ce(stdout2);
     process.exit(0);
   }
-  if (remotes.length === 1) {
+  if (remotes.length === 1 && config6.OCO_GITPUSH !== true) {
     const isPushConfirmedByUser = await Q3({
       message: "Do you want to run `git push`?"
     });
@@ -45393,10 +45339,7 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
       }
     }
   } catch (error) {
-    commitGenerationSpinner.stop(
-      `${source_default.red("\u2716")} Failed to generate the commit message`
-    );
-    console.log(error);
+    commitGenerationSpinner.stop("\u{1F4DD} Commit message generated");
     const err = error;
     ce(`${source_default.red("\u2716")} ${err?.message || err}`);
     process.exit(1);
@@ -45731,12 +45674,11 @@ function set_missing_default_values_default() {
     const entriesToSet = [];
     for (const entry of Object.entries(DEFAULT_CONFIG)) {
       const [key, _value] = entry;
-      if (config7[key] === "undefined" || config7[key] === void 0)
+      if (config7[key] === "undefined")
         entriesToSet.push(entry);
     }
     if (entriesToSet.length > 0)
       setConfig(entriesToSet);
-    console.log(entriesToSet);
   };
   setDefaultConfigValues(getGlobalConfig());
 }
@@ -45793,7 +45735,6 @@ var runMigrations = async () => {
       ce(
         `${source_default.red("Failed to apply migration")} ${migration.name}: ${error}`
       );
-      process.exit(1);
     }
     isMigrated = true;
   }

View File

@@ -48730,15 +48730,6 @@ var MODEL_LIST = {
     "gemini-1.0-pro",
     "gemini-pro-vision",
     "text-embedding-004"
-  ],
-  groq: [
-    "llama3-70b-8192",
-    "llama3-8b-8192",
-    "llama-guard-3-8b",
-    "llama-3.1-8b-instant",
-    "llama-3.1-70b-versatile",
-    "gemma-7b-it",
-    "gemma2-9b-it"
   ]
 };
 var getDefaultModel = (provider) => {
@@ -48749,8 +48740,6 @@ var getDefaultModel = (provider) => {
       return MODEL_LIST.anthropic[0];
     case "gemini":
       return MODEL_LIST.gemini[0];
-    case "groq":
-      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -48874,15 +48863,9 @@ var configValidators = {
       value = "openai";
     validateConfig(
       "OCO_AI_PROVIDER" /* OCO_AI_PROVIDER */,
-      [
-        "openai",
-        "anthropic",
-        "gemini",
-        "azure",
-        "test",
-        "flowise",
-        "groq"
-      ].includes(value) || value.startsWith("ollama"),
+      ["openai", "anthropic", "gemini", "azure", "test", "flowise"].includes(
+        value
+      ) || value.startsWith("ollama"),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
     );
     return value;
@@ -48928,6 +48911,7 @@ var DEFAULT_CONFIG = {
   OCO_AI_PROVIDER: "openai" /* OPENAI */,
   OCO_ONE_LINE_COMMIT: false,
   OCO_TEST_MOCK_TYPE: "commit-message",
+  OCO_FLOWISE_ENDPOINT: ":",
   OCO_WHY: false,
   OCO_GITPUSH: true
 };
@@ -48987,25 +48971,6 @@ var mergeConfigs = (main, fallback) => {
     return acc;
   }, {});
 };
-var cleanUndefinedValues = (config6) => {
-  return Object.fromEntries(
-    Object.entries(config6).map(([_3, v2]) => {
-      try {
-        if (typeof v2 === "string") {
-          if (v2 === "undefined")
-            return [_3, void 0];
-          if (v2 === "null")
-            return [_3, null];
-          const parsedValue = JSON.parse(v2);
-          return [_3, parsedValue];
-        }
-        return [_3, v2];
-      } catch (error) {
-        return [_3, v2];
-      }
-    })
-  );
-};
 var getConfig = ({
   envPath = defaultEnvPath,
   globalPath = defaultConfigPath
@@ -49013,8 +48978,7 @@ var getConfig = ({
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);
   const config6 = mergeConfigs(envConfig, globalConfig);
-  const cleanConfig = cleanUndefinedValues(config6);
-  return cleanConfig;
+  return config6;
 };
 var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
   const config6 = getConfig({
@@ -63309,19 +63273,7 @@ var OpenAiEngine = class {
       }
     };
     this.config = config6;
-    if (!config6.baseURL) {
-      this.client = new OpenAI({ apiKey: config6.apiKey });
-    } else {
-      this.client = new OpenAI({ apiKey: config6.apiKey, baseURL: config6.baseURL });
-    }
-  }
-};
-// src/engine/groq.ts
-var GroqEngine = class extends OpenAiEngine {
-  constructor(config6) {
-    config6.baseURL = "https://api.groq.com/openai/v1";
-    super(config6);
+    this.client = new OpenAI({ apiKey: config6.apiKey });
   }
 };
@@ -63349,8 +63301,6 @@ function getEngine() {
      return new AzureEngine(DEFAULT_CONFIG2);
     case "flowise" /* FLOWISE */:
       return new FlowiseEngine(DEFAULT_CONFIG2);
-    case "groq" /* GROQ */:
-      return new GroqEngine(DEFAULT_CONFIG2);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG2);
   }

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
 {
   "name": "opencommit",
-  "version": "3.2.2",
+  "version": "3.1.2",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "opencommit",
-      "version": "3.2.2",
+      "version": "3.1.2",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",

View File

@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "3.2.2",
+  "version": "3.1.2",
   "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",
@@ -46,9 +46,8 @@
     "dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
     "build": "rimraf out && node esbuild.config.js",
     "build:push": "npm run build && git add . && git commit -m 'build' && git push",
-    "deploy": "npm publish --tag latest",
-    "deploy:build": "npm run build:push && git push --tags && npm run deploy",
-    "deploy:patch": "npm version patch && npm run deploy:build",
+    "deploy": "npm run build:push && git push --tags && npm publish --tag latest",
+    "deploy:patch": "npm version patch && npm run deploy",
     "lint": "eslint src --ext ts && tsc --noEmit",
     "format": "prettier --write src",
     "test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",

View File

@@ -183,11 +183,7 @@ ${chalk.grey('——————————————————')}`
       }
     }
   } catch (error) {
-    commitGenerationSpinner.stop(
-      `${chalk.red('✖')} Failed to generate the commit message`
-    );
-    console.log(error);
+    commitGenerationSpinner.stop('📝 Commit message generated');
     const err = error as Error;
     outro(`${chalk.red('✖')} ${err?.message || err}`);

View File

@@ -76,16 +76,6 @@ export const MODEL_LIST = {
     'gemini-1.0-pro',
     'gemini-pro-vision',
     'text-embedding-004'
-  ],
-  groq: [
-    'llama3-70b-8192', // Meta Llama 3 70B (default one, no daily token limit and 14 400 reqs/day)
-    'llama3-8b-8192', // Meta Llama 3 8B
-    'llama-guard-3-8b', // Llama Guard 3 8B
-    'llama-3.1-8b-instant', // Llama 3.1 8B (Preview)
-    'llama-3.1-70b-versatile', // Llama 3.1 70B (Preview)
-    'gemma-7b-it', // Gemma 7B
-    'gemma2-9b-it' // Gemma 2 9B
   ]
 };
@@ -97,8 +87,6 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.anthropic[0];
     case 'gemini':
       return MODEL_LIST.gemini[0];
-    case 'groq':
-      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -253,15 +241,9 @@ export const configValidators = {
     validateConfig(
       CONFIG_KEYS.OCO_AI_PROVIDER,
-      [
-        'openai',
-        'anthropic',
-        'gemini',
-        'azure',
-        'test',
-        'flowise',
-        'groq'
-      ].includes(value) || value.startsWith('ollama'),
+      ['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise'].includes(
+        value
+      ) || value.startsWith('ollama'),
       `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
     );
@@ -306,8 +288,7 @@ export enum OCO_AI_PROVIDER_ENUM {
   GEMINI = 'gemini',
   AZURE = 'azure',
   TEST = 'test',
-  FLOWISE = 'flowise',
-  GROQ = 'groq'
+  FLOWISE = 'flowise'
 }
 
 export type ConfigType = {
@@ -371,6 +352,7 @@ export const DEFAULT_CONFIG = {
   OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
   OCO_ONE_LINE_COMMIT: false,
   OCO_TEST_MOCK_TYPE: 'commit-message',
+  OCO_FLOWISE_ENDPOINT: ':',
   OCO_WHY: false,
   OCO_GITPUSH: true // todo: deprecate
 };
@@ -462,25 +444,6 @@ interface GetConfigOptions {
   setDefaultValues?: boolean;
 }
-const cleanUndefinedValues = (config: ConfigType) => {
-  return Object.fromEntries(
-    Object.entries(config).map(([_, v]) => {
-      try {
-        if (typeof v === 'string') {
-          if (v === 'undefined') return [_, undefined];
-          if (v === 'null') return [_, null];
-          const parsedValue = JSON.parse(v);
-          return [_, parsedValue];
-        }
-        return [_, v];
-      } catch (error) {
-        return [_, v];
-      }
-    })
-  );
-};
 export const getConfig = ({
   envPath = defaultEnvPath,
   globalPath = defaultConfigPath
@@ -490,9 +453,7 @@ export const getConfig = ({
   const config = mergeConfigs(envConfig, globalConfig);
-  const cleanConfig = cleanUndefinedValues(config);
-  return cleanConfig as ConfigType;
+  return config;
 };
 export const setConfig = (

View File

@@ -1,10 +0,0 @@
-import { OpenAiConfig, OpenAiEngine } from './openAi';
-
-interface GroqConfig extends OpenAiConfig {}
-
-export class GroqEngine extends OpenAiEngine {
-  constructor(config: GroqConfig) {
-    config.baseURL = 'https://api.groq.com/openai/v1';
-    super(config);
-  }
-}

View File

@@ -4,7 +4,7 @@ import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitD
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
 
-export interface OpenAiConfig extends AiEngineConfig {}
+interface OpenAiConfig extends AiEngineConfig {}
 
 export class OpenAiEngine implements AiEngine {
   config: OpenAiConfig;
@@ -12,12 +12,7 @@ export class OpenAiEngine implements AiEngine {
   constructor(config: OpenAiConfig) {
     this.config = config;
-    if (!config.baseURL) {
-      this.client = new OpenAI({ apiKey: config.apiKey });
-    } else {
-      this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
-    }
+    this.client = new OpenAI({ apiKey: config.apiKey });
   }
 
   public generateCommitMessage = async (

View File

@@ -10,12 +10,10 @@ export default function () {
     const entriesToSet: [key: string, value: string | boolean | number][] = [];
     for (const entry of Object.entries(DEFAULT_CONFIG)) {
       const [key, _value] = entry;
-      if (config[key] === 'undefined' || config[key] === undefined)
-        entriesToSet.push(entry);
+      if (config[key] === 'undefined') entriesToSet.push(entry);
     }
     if (entriesToSet.length > 0) setConfig(entriesToSet);
-    console.log(entriesToSet);
   };
 
   setDefaultConfigValues(getGlobalConfig());
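For context on the string comparison above: values read back from the global ~/.opencommit file arrive as plain strings (the removed cleanUndefinedValues helper earlier in this compare suggests as much), so a key once serialized from undefined can come back as the literal text "undefined". A minimal TypeScript illustration of the difference, under that assumption:

// Illustration only; not code from this repository.
const raw: Record<string, string> = { OCO_WHY: 'undefined' };

console.log(raw.OCO_WHY === 'undefined'); // true  — what the migration checks for
console.log(raw.OCO_WHY === undefined);   // false — a plain undefined check would miss it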

View File

@@ -53,7 +53,6 @@ export const runMigrations = async () => {
           migration.name
         }: ${error}`
       );
-      process.exit(1);
     }
     isMigrated = true;

View File

@@ -7,7 +7,6 @@ import { GeminiEngine } from '../engine/gemini';
 import { OllamaEngine } from '../engine/ollama';
 import { OpenAiEngine } from '../engine/openAi';
 import { TestAi, TestMockType } from '../engine/testAi';
-import { GroqEngine } from '../engine/groq';
 
 export function getEngine(): AiEngine {
   const config = getConfig();
@@ -40,9 +39,6 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.FLOWISE:
       return new FlowiseEngine(DEFAULT_CONFIG);
-    case OCO_AI_PROVIDER_ENUM.GROQ:
-      return new GroqEngine(DEFAULT_CONFIG);
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }