diff --git a/README.md b/README.md index 93f8ea3..845c905 100644 --- a/README.md +++ b/README.md @@ -211,9 +211,9 @@ oco config set OCO_LANGUAGE=française The default language setting is **English** All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder -### Push to git +### Push to git (gonna be deprecated) -Pushing to git is on by default but if you would like to turn it off just use: +A prompt to push to git is on by default but if you would like to turn it off just use: ```sh oco config set OCO_GITPUSH=false diff --git a/out/cli.cjs b/out/cli.cjs index cdf6397..f640074 100755 --- a/out/cli.cjs +++ b/out/cli.cjs @@ -28023,49 +28023,86 @@ var assertConfigsAreValid = (config7) => { } } }; +var initGlobalConfig = () => { + const defaultConfig = { + OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */, + OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */, + OCO_DESCRIPTION: false, + OCO_EMOJI: false, + OCO_MODEL: getDefaultModel("openai"), + OCO_LANGUAGE: "en", + OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg", + OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */, + OCO_AI_PROVIDER: "openai" /* OPENAI */, + OCO_GITPUSH: true, + OCO_ONE_LINE_COMMIT: false, + OCO_TEST_MOCK_TYPE: "commit-message", + OCO_FLOWISE_ENDPOINT: ":" + }; + (0, import_fs.writeFileSync)(defaultConfigPath, (0, import_ini.stringify)(defaultConfig), "utf8"); + return defaultConfig; +}; +var parseEnvVarValue = (value) => { + if (!value) + return null; + try { + return JSON.parse(value); + } catch (error) { + return value; + } +}; var getConfig = ({ configPath = defaultConfigPath, envPath = defaultEnvPath } = {}) => { dotenv.config({ path: envPath }); const configFromEnv = { + OCO_MODEL: process.env.OCO_MODEL, OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY, OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY, OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY, OCO_GEMINI_API_KEY: 
process.env.OCO_GEMINI_API_KEY, - OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT ? Number(process.env.OCO_TOKENS_MAX_INPUT) : 40960 /* DEFAULT_MAX_TOKENS_INPUT */, - OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT ? Number(process.env.OCO_TOKENS_MAX_OUTPUT) : 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */, + OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY, + OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT), + OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT), OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH, OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH, - OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === "true" ? true : false, - OCO_EMOJI: process.env.OCO_EMOJI === "true" ? true : false, - OCO_MODEL: process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER), - OCO_LANGUAGE: process.env.OCO_LANGUAGE || "en", - OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || "$msg", - OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || "conventional-commit", - OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || "openai", - OCO_GITPUSH: process.env.OCO_GITPUSH === "false" ? false : true, - OCO_ONE_LINE_COMMIT: process.env.OCO_ONE_LINE_COMMIT === "true" ? 
true : false, - OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || void 0, - OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || "commit-message", - OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ":", - OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || void 0, - OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || void 0 + OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT, + OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT, + OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL, + OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION), + OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI), + OCO_LANGUAGE: process.env.OCO_LANGUAGE, + OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER, + OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE, + OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER, + OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT), + OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE, + OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) }; + let globalConfig; const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath); if (!isGlobalConfigFileExist) - return configFromEnv; - const configFile = (0, import_fs.readFileSync)(configPath, "utf8"); - const globalConfig = (0, import_ini.parse)(configFile); - const config7 = Object.keys(globalConfig).reduce((acc, key) => { - acc[key] = configFromEnv[key] || globalConfig[key]; - return acc; - }, {}); + globalConfig = initGlobalConfig(); + else { + const configFile = (0, import_fs.readFileSync)(configPath, "utf8"); + globalConfig = (0, import_ini.parse)(configFile); + } + function mergeObjects(main, fallback) { + return Object.keys(fallback).reduce( + (acc, key) => { + acc[key] = main[key] !== void 0 ? 
main[key] : fallback[key]; + return acc; + }, + { ...main } + ); + } + const config7 = mergeObjects(configFromEnv, globalConfig); return config7; }; var setConfig = (keyValues, configPath = defaultConfigPath) => { const keysToSet = keyValues.map(([key, value]) => `${key} to ${value}`).join(", "); - const config7 = getConfig() || {}; + const config7 = getConfig(); for (let [key, value] of keyValues) { if (!configValidators.hasOwnProperty(key)) { const supportedKeys = Object.keys(configValidators).join("\n"); @@ -28091,7 +28128,7 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit` } (0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config7), "utf8"); assertConfigsAreValid(config7); - ce(`${source_default.green("\u2714")} Config successfully set`); + ce(`${source_default.green("\u2714")} config successfully set: ${keysToSet}`); }; var configCommand = G3( { @@ -37180,8 +37217,7 @@ var FlowiseAi = class { constructor(config7) { this.config = config7; this.client = axios_default.create({ - url: `api/v1/prediction/${config7.apiKey}`, - baseURL: config7.baseURL, + url: `${config7.baseURL}/${config7.apiKey}`, headers: { "Content-Type": "application/json" } }); } @@ -38034,8 +38070,7 @@ var OllamaAi = class { constructor(config7) { this.config = config7; this.client = axios_default.create({ - url: config7.baseURL ? `api/v1/prediction/${config7.apiKey}` : "http://localhost:11434/api/chat", - baseURL: config7.baseURL, + url: config7.baseURL ? 
`${config7.baseURL}/${config7.apiKey}` : "http://localhost:11434/api/chat", headers: { "Content-Type": "application/json" } }); } @@ -42371,13 +42406,14 @@ function getEngine() { return new TestAi(config7.OCO_TEST_MOCK_TYPE); case "gemini": return new Gemini({ + ...DEFAULT_CONFIG, apiKey: config7.OCO_GEMINI_API_KEY, - ...DEFAULT_CONFIG + baseURL: config7.OCO_GEMINI_BASE_PATH }); case "azure": return new AzureEngine({ - apiKey: config7.OCO_AZURE_API_KEY, - ...DEFAULT_CONFIG + ...DEFAULT_CONFIG, + apiKey: config7.OCO_AZURE_API_KEY }); case "flowise": return new FlowiseAi({ @@ -42868,8 +42904,8 @@ function mergeDiffs(arr, maxStringLength) { // src/generateCommitMessageFromGitDiff.ts var config5 = getConfig(); -var MAX_TOKENS_INPUT = config5?.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */; -var MAX_TOKENS_OUTPUT = config5?.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */; +var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */; +var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */; var generateCommitMessageChatCompletionPrompt = async (diff, fullGitMojiSpec) => { const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec); const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT]; @@ -43156,8 +43192,6 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2 ce(`${source_default.green("\u2714")} Successfully committed`); ce(stdout); const remotes = await getGitRemotes(); - if (config6.OCO_GITPUSH === false) - return; if (!remotes.length) { const { stdout: stdout2 } = await execa("git", ["push"]); if (stdout2) @@ -43427,7 +43461,7 @@ var prepareCommitMessageHook = async (isStageAllFlag = false) => { return; ae("opencommit"); const config7 = getConfig(); - if (!config7?.OCO_OPENAI_API_KEY && !config7?.OCO_ANTHROPIC_API_KEY && !config7?.OCO_AZURE_API_KEY) { + if (!config7.OCO_OPENAI_API_KEY && !config7.OCO_ANTHROPIC_API_KEY 
&& !config7.OCO_AZURE_API_KEY) { throw new Error( "No OPEN_AI_API or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key in ~/.opencommit" ); diff --git a/out/github-action.cjs b/out/github-action.cjs index 9899614..26fdaab 100644 --- a/out/github-action.cjs +++ b/out/github-action.cjs @@ -46835,49 +46835,86 @@ var assertConfigsAreValid = (config6) => { } } }; +var initGlobalConfig = () => { + const defaultConfig = { + OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */, + OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */, + OCO_DESCRIPTION: false, + OCO_EMOJI: false, + OCO_MODEL: getDefaultModel("openai"), + OCO_LANGUAGE: "en", + OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg", + OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */, + OCO_AI_PROVIDER: "openai" /* OPENAI */, + OCO_GITPUSH: true, + OCO_ONE_LINE_COMMIT: false, + OCO_TEST_MOCK_TYPE: "commit-message", + OCO_FLOWISE_ENDPOINT: ":" + }; + (0, import_fs.writeFileSync)(defaultConfigPath, (0, import_ini.stringify)(defaultConfig), "utf8"); + return defaultConfig; +}; +var parseEnvVarValue = (value) => { + if (!value) + return null; + try { + return JSON.parse(value); + } catch (error) { + return value; + } +}; var getConfig = ({ configPath = defaultConfigPath, envPath = defaultEnvPath } = {}) => { dotenv.config({ path: envPath }); const configFromEnv = { + OCO_MODEL: process.env.OCO_MODEL, OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY, OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY, OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY, OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY, - OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT ? Number(process.env.OCO_TOKENS_MAX_INPUT) : 40960 /* DEFAULT_MAX_TOKENS_INPUT */, - OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT ? 
Number(process.env.OCO_TOKENS_MAX_OUTPUT) : 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */, + OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY, + OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT), + OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT), OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH, OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH, - OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === "true" ? true : false, - OCO_EMOJI: process.env.OCO_EMOJI === "true" ? true : false, - OCO_MODEL: process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER), - OCO_LANGUAGE: process.env.OCO_LANGUAGE || "en", - OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || "$msg", - OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || "conventional-commit", - OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || "openai", - OCO_GITPUSH: process.env.OCO_GITPUSH === "false" ? false : true, - OCO_ONE_LINE_COMMIT: process.env.OCO_ONE_LINE_COMMIT === "true" ? 
true : false, - OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || void 0, - OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || "commit-message", - OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ":", - OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || void 0, - OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || void 0 + OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT, + OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT, + OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL, + OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION), + OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI), + OCO_LANGUAGE: process.env.OCO_LANGUAGE, + OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER, + OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE, + OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER, + OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT), + OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE, + OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) }; + let globalConfig; const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath); if (!isGlobalConfigFileExist) - return configFromEnv; - const configFile = (0, import_fs.readFileSync)(configPath, "utf8"); - const globalConfig = (0, import_ini.parse)(configFile); - const config6 = Object.keys(globalConfig).reduce((acc, key) => { - acc[key] = configFromEnv[key] || globalConfig[key]; - return acc; - }, {}); + globalConfig = initGlobalConfig(); + else { + const configFile = (0, import_fs.readFileSync)(configPath, "utf8"); + globalConfig = (0, import_ini.parse)(configFile); + } + function mergeObjects(main, fallback) { + return Object.keys(fallback).reduce( + (acc, key) => { + acc[key] = main[key] !== void 0 ? 
main[key] : fallback[key]; + return acc; + }, + { ...main } + ); + } + const config6 = mergeObjects(configFromEnv, globalConfig); return config6; }; var setConfig = (keyValues, configPath = defaultConfigPath) => { const keysToSet = keyValues.map(([key, value]) => `${key} to ${value}`).join(", "); - const config6 = getConfig() || {}; + const config6 = getConfig(); for (let [key, value] of keyValues) { if (!configValidators.hasOwnProperty(key)) { const supportedKeys = Object.keys(configValidators).join("\n"); @@ -46903,7 +46940,7 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit` } (0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config6), "utf8"); assertConfigsAreValid(config6); - ce(`${source_default.green("\u2714")} Config successfully set`); + ce(`${source_default.green("\u2714")} config successfully set: ${keysToSet}`); }; var configCommand = G2( { @@ -55992,8 +56029,7 @@ var FlowiseAi = class { constructor(config6) { this.config = config6; this.client = axios_default.create({ - url: `api/v1/prediction/${config6.apiKey}`, - baseURL: config6.baseURL, + url: `${config6.baseURL}/${config6.apiKey}`, headers: { "Content-Type": "application/json" } }); } @@ -56846,8 +56882,7 @@ var OllamaAi = class { constructor(config6) { this.config = config6; this.client = axios_default.create({ - url: config6.baseURL ? `api/v1/prediction/${config6.apiKey}` : "http://localhost:11434/api/chat", - baseURL: config6.baseURL, + url: config6.baseURL ? 
`${config6.baseURL}/${config6.apiKey}` : "http://localhost:11434/api/chat", headers: { "Content-Type": "application/json" } }); } @@ -61183,13 +61218,14 @@ function getEngine() { return new TestAi(config6.OCO_TEST_MOCK_TYPE); case "gemini": return new Gemini({ + ...DEFAULT_CONFIG, apiKey: config6.OCO_GEMINI_API_KEY, - ...DEFAULT_CONFIG + baseURL: config6.OCO_GEMINI_BASE_PATH }); case "azure": return new AzureEngine({ - apiKey: config6.OCO_AZURE_API_KEY, - ...DEFAULT_CONFIG + ...DEFAULT_CONFIG, + apiKey: config6.OCO_AZURE_API_KEY }); case "flowise": return new FlowiseAi({ @@ -61680,8 +61716,8 @@ function mergeDiffs(arr, maxStringLength) { // src/generateCommitMessageFromGitDiff.ts var config5 = getConfig(); -var MAX_TOKENS_INPUT = config5?.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */; -var MAX_TOKENS_OUTPUT = config5?.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */; +var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */; +var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */; var generateCommitMessageChatCompletionPrompt = async (diff, fullGitMojiSpec) => { const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec); const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT]; diff --git a/src/commands/commit.ts b/src/commands/commit.ts index d12214e..ff10138 100644 --- a/src/commands/commit.ts +++ b/src/commands/commit.ts @@ -102,9 +102,6 @@ ${chalk.grey('——————————————————')}` const remotes = await getGitRemotes(); - // user isn't pushing, return early - if (config.OCO_GITPUSH === false) return; - if (!remotes.length) { const { stdout } = await execa('git', ['push']); if (stdout) outro(stdout); diff --git a/src/commands/config.ts b/src/commands/config.ts index cb6e0ee..63f94be 100644 --- a/src/commands/config.ts +++ b/src/commands/config.ts @@ -26,7 +26,7 @@ export enum CONFIG_KEYS { OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 
'OCO_MESSAGE_TEMPLATE_PLACEHOLDER', OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE', OCO_AI_PROVIDER = 'OCO_AI_PROVIDER', - OCO_GITPUSH = 'OCO_GITPUSH', + OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT', OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT', OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE', @@ -280,6 +280,7 @@ export const configValidators = { return value; }, + // todo: deprecate [CONFIG_KEYS.OCO_GITPUSH](value: any) { validateConfig( CONFIG_KEYS.OCO_GITPUSH, @@ -354,8 +355,39 @@ export const configValidators = { } }; +enum OCO_AI_PROVIDER_ENUM { + OPENAI = 'openai', + ANTHROPIC = 'anthropic', + GEMINI = 'gemini', + AZURE = 'azure', + TEST = 'test', + FLOWISE = 'flowise' +} + export type ConfigType = { - [key in CONFIG_KEYS]?: any; + [CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string; + [CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string; + [CONFIG_KEYS.OCO_AZURE_API_KEY]?: string; + [CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string; + [CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string; + [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number; + [CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number; + [CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string; + [CONFIG_KEYS.OCO_DESCRIPTION]: boolean; + [CONFIG_KEYS.OCO_EMOJI]: boolean; + [CONFIG_KEYS.OCO_MODEL]: string; + [CONFIG_KEYS.OCO_LANGUAGE]: string; + [CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER]: string; + [CONFIG_KEYS.OCO_PROMPT_MODULE]: OCO_PROMPT_MODULE_ENUM; + [CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM; + [CONFIG_KEYS.OCO_GITPUSH]: boolean; + [CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean; + [CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string; + [CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string; + [CONFIG_KEYS.OCO_API_URL]?: string; + [CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string; + [CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string; + [CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string; }; const defaultConfigPath = pathJoin(homedir(), '.opencommit'); @@ -384,6 +416,42 @@ const assertConfigsAreValid = (config: Record) => { } }; +enum OCO_PROMPT_MODULE_ENUM { + 
CONVENTIONAL_COMMIT = 'conventional-commit', + COMMITLINT = '@commitlint' +} + +const initGlobalConfig = () => { + const defaultConfig = { + OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT, + OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT, + OCO_DESCRIPTION: false, + OCO_EMOJI: false, + OCO_MODEL: getDefaultModel('openai'), + OCO_LANGUAGE: 'en', + OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg', + OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT, + OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI, + OCO_GITPUSH: true, // todo: deprecate + OCO_ONE_LINE_COMMIT: false, + OCO_TEST_MOCK_TYPE: 'commit-message', + OCO_FLOWISE_ENDPOINT: ':' + }; + + writeFileSync(defaultConfigPath, iniStringify(defaultConfig), 'utf8'); + return defaultConfig; +}; + +const parseEnvVarValue = (value?: any) => { + if (!value) return null; + + try { + return JSON.parse(value); + } catch (error) { + return value; + } +}; + export const getConfig = ({ configPath = defaultConfigPath, envPath = defaultEnvPath @@ -394,49 +462,57 @@ export const getConfig = ({ dotenv.config({ path: envPath }); const configFromEnv = { + OCO_MODEL: process.env.OCO_MODEL, + OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY, OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY, OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY, OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY, - OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT - ? Number(process.env.OCO_TOKENS_MAX_INPUT) - : DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT, - OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT - ? 
Number(process.env.OCO_TOKENS_MAX_OUTPUT) - : DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT, + OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY, + + OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT), + OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT), + OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH, OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH, - OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false, - OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false, - OCO_MODEL: - process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER), - OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en', + + OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT, + OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT, + OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL, + + OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION), + OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI), + OCO_LANGUAGE: process.env.OCO_LANGUAGE, OCO_MESSAGE_TEMPLATE_PLACEHOLDER: - process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg', - OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit', - OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai', - OCO_GITPUSH: process.env.OCO_GITPUSH === 'false' ? false : true, - OCO_ONE_LINE_COMMIT: - process.env.OCO_ONE_LINE_COMMIT === 'true' ? 
true : false, - OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || undefined, - OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE || 'commit-message', - OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT || ':', - OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY || undefined, - OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL || undefined + process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER, + OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM, + OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM, + OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT), + OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE, + + OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate }; + let globalConfig: ConfigType; const isGlobalConfigFileExist = existsSync(configPath); + if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(); + else { + const configFile = readFileSync(configPath, 'utf8'); + globalConfig = iniParse(configFile) as ConfigType; + } - if (!isGlobalConfigFileExist) return configFromEnv; - - const configFile = readFileSync(configPath, 'utf8'); - const globalConfig = iniParse(configFile); + function mergeObjects(main: Partial, fallback: ConfigType) { + return Object.keys(fallback).reduce( + (acc, key) => { + acc[key] = main[key] !== undefined ? 
main[key] : fallback[key]; + return acc; + }, + { ...main } + ); + } // env config takes precedence over global ~/.opencommit config file - const config = Object.keys(globalConfig).reduce((acc, key) => { - acc[key] = configFromEnv[key] || globalConfig[key]; - return acc; - }, {} as typeof configFromEnv); + const config = mergeObjects(configFromEnv, globalConfig); return config; }; @@ -449,7 +525,7 @@ export const setConfig = ( .map(([key, value]) => `${key} to ${value}`) .join(', '); - const config = getConfig() || {}; + const config = getConfig(); for (let [key, value] of keyValues) { if (!configValidators.hasOwnProperty(key)) { @@ -479,7 +555,7 @@ export const setConfig = ( assertConfigsAreValid(config); - outro(`${chalk.green('✔')} Config successfully set`); + outro(`${chalk.green('✔')} config successfully set: ${keysToSet}`); }; export const configCommand = command( diff --git a/test/unit/config.test.ts b/test/unit/config.test.ts index 3112fb1..15b227e 100644 --- a/test/unit/config.test.ts +++ b/test/unit/config.test.ts @@ -3,6 +3,9 @@ import { prepareFile } from './utils'; describe('getConfig', () => { const originalEnv = { ...process.env }; + let globalConfigFile: { filePath: string; cleanup: () => Promise }; + let localEnvFile: { filePath: string; cleanup: () => Promise }; + function resetEnv(env: NodeJS.ProcessEnv) { Object.keys(process.env).forEach((key) => { if (!(key in env)) { @@ -13,93 +16,132 @@ describe('getConfig', () => { }); } - beforeEach(() => { + beforeEach(async () => { resetEnv(originalEnv); + if (globalConfigFile) await globalConfigFile.cleanup(); + if (localEnvFile) await localEnvFile.cleanup(); }); afterAll(() => { resetEnv(originalEnv); }); - it('return config values from the global config file', async () => { - const configFile = await prepareFile( + const generateConfig = async (fileName: string, content: string) => { + return await prepareFile(fileName, content); + }; + + it('should prioritize local .env over global .opencommit 
config', async () => { + globalConfigFile = await generateConfig( '.opencommit', ` -OCO_OPENAI_API_KEY="sk-key" -OCO_ANTHROPIC_API_KEY="secret-key" -OCO_TOKENS_MAX_INPUT="8192" -OCO_TOKENS_MAX_OUTPUT="1000" -OCO_OPENAI_BASE_PATH="/openai/api" -OCO_DESCRIPTION="true" -OCO_EMOJI="true" -OCO_MODEL="gpt-4" -OCO_LANGUAGE="de" -OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m" -OCO_PROMPT_MODULE="@commitlint" -OCO_AI_PROVIDER="ollama" -OCO_GITPUSH="false" -OCO_ONE_LINE_COMMIT="true" +OCO_OPENAI_API_KEY="global-key" +OCO_MODEL="gpt-3.5-turbo" +OCO_LANGUAGE="en" ` ); - const config = getConfig({ configPath: configFile.filePath, envPath: '' }); - expect(config).not.toEqual(null); - expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key'); - expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key'); - expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192); - expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000); - expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api'); - expect(config!['OCO_DESCRIPTION']).toEqual(true); - expect(config!['OCO_EMOJI']).toEqual(true); - expect(config!['OCO_MODEL']).toEqual('gpt-4'); - expect(config!['OCO_LANGUAGE']).toEqual('de'); - expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m'); - expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint'); - expect(() => ['ollama', 'gemini'].includes(config!['OCO_AI_PROVIDER'])).toBeTruthy(); - expect(config!['OCO_GITPUSH']).toEqual(false); - expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true); - - await configFile.cleanup(); - }); - - it('return config values from the local env file', async () => { - const envFile = await prepareFile( + localEnvFile = await generateConfig( '.env', ` -OCO_OPENAI_API_KEY="sk-key" -OCO_ANTHROPIC_API_KEY="secret-key" -OCO_TOKENS_MAX_INPUT="8192" -OCO_TOKENS_MAX_OUTPUT="1000" -OCO_OPENAI_BASE_PATH="/openai/api" -OCO_DESCRIPTION="true" -OCO_EMOJI="true" -OCO_MODEL="gpt-4" -OCO_LANGUAGE="de" -OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m" -OCO_PROMPT_MODULE="@commitlint" 
-OCO_AI_PROVIDER="ollama" -OCO_GITPUSH="false" -OCO_ONE_LINE_COMMIT="true" - ` +OCO_OPENAI_API_KEY="local-key" +OCO_ANTHROPIC_API_KEY="local-anthropic-key" +OCO_LANGUAGE="fr" +` ); - const config = getConfig({ configPath: '', envPath: envFile.filePath }); + + const config = getConfig({ + configPath: globalConfigFile.filePath, + envPath: localEnvFile.filePath + }); expect(config).not.toEqual(null); - expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key'); - expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key'); - expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192); - expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000); - expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api'); - expect(config!['OCO_DESCRIPTION']).toEqual(true); - expect(config!['OCO_EMOJI']).toEqual(true); + expect(config!['OCO_OPENAI_API_KEY']).toEqual('local-key'); + expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('local-anthropic-key'); + expect(config!['OCO_MODEL']).toEqual('gpt-3.5-turbo'); + expect(config!['OCO_LANGUAGE']).toEqual('fr'); + }); + + it('should fallback to global config when local config is not set', async () => { + globalConfigFile = await generateConfig( + '.opencommit', + ` +OCO_OPENAI_API_KEY="global-key" +OCO_MODEL="gpt-4" +OCO_LANGUAGE="de" +OCO_DESCRIPTION="true" +` + ); + + localEnvFile = await generateConfig( + '.env', + ` +OCO_ANTHROPIC_API_KEY="local-anthropic-key" +` + ); + + const config = getConfig({ + configPath: globalConfigFile.filePath, + envPath: localEnvFile.filePath + }); + + expect(config).not.toEqual(null); + expect(config!['OCO_OPENAI_API_KEY']).toEqual('global-key'); + expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('local-anthropic-key'); expect(config!['OCO_MODEL']).toEqual('gpt-4'); expect(config!['OCO_LANGUAGE']).toEqual('de'); - expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m'); - expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint'); - expect(() => ['ollama', 
'gemini'].includes(config!['OCO_AI_PROVIDER'])).toBeTruthy(); - expect(config!['OCO_GITPUSH']).toEqual(false); - expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true); - - await envFile.cleanup(); + expect(config!['OCO_DESCRIPTION']).toEqual(true); }); -}); \ No newline at end of file + + it('should handle boolean and numeric values correctly', async () => { + globalConfigFile = await generateConfig( + '.opencommit', + ` +OCO_TOKENS_MAX_INPUT="4096" +OCO_TOKENS_MAX_OUTPUT="500" +OCO_GITPUSH="true" +` + ); + + localEnvFile = await generateConfig( + '.env', + ` +OCO_TOKENS_MAX_INPUT="8192" +OCO_ONE_LINE_COMMIT="false" +` + ); + + const config = getConfig({ + configPath: globalConfigFile.filePath, + envPath: localEnvFile.filePath + }); + + expect(config).not.toEqual(null); + expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192); + expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(500); + expect(config!['OCO_GITPUSH']).toEqual(true); + expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(false); + }); + + it('should handle empty local config correctly', async () => { + globalConfigFile = await generateConfig( + '.opencommit', + ` +OCO_OPENAI_API_KEY="global-key" +OCO_MODEL="gpt-4" +OCO_LANGUAGE="es" +` + ); + + localEnvFile = await generateConfig('.env', ''); + + const config = getConfig({ + configPath: globalConfigFile.filePath, + envPath: localEnvFile.filePath + }); + + expect(config).not.toEqual(null); + expect(config!['OCO_OPENAI_API_KEY']).toEqual('global-key'); + expect(config!['OCO_MODEL']).toEqual('gpt-4'); + expect(config!['OCO_LANGUAGE']).toEqual('es'); + }); +}); diff --git a/test/unit/gemini.test.ts b/test/unit/gemini.test.ts index 8b7fd9d..885688d 100644 --- a/test/unit/gemini.test.ts +++ b/test/unit/gemini.test.ts @@ -1,7 +1,8 @@ import { Gemini } from '../../src/engine/gemini'; -import { ChatCompletionRequestMessage } from 'openai'; + import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai'; import { ConfigType, getConfig } from 
'../../src/commands/config'; +import { OpenAI } from 'openai'; describe('Gemini', () => { let gemini: Gemini; @@ -9,45 +10,49 @@ describe('Gemini', () => { let mockGoogleGenerativeAi: GoogleGenerativeAI; let mockGenerativeModel: GenerativeModel; let mockExit: jest.SpyInstance; - let mockWarmup: jest.SpyInstance; - - const noop: (code?: number | undefined) => never = (code?: number | undefined) => {}; - + + const noop: (...args: any[]) => any = (...args: any[]) => {}; + const mockGemini = () => { - gemini = new Gemini(); - } - + gemini = new Gemini({ + apiKey: mockConfig.OCO_GEMINI_API_KEY, + model: mockConfig.OCO_MODEL + }); + }; + const oldEnv = process.env; beforeEach(() => { jest.resetModules(); process.env = { ...oldEnv }; - + jest.mock('@google/generative-ai'); jest.mock('../src/commands/config'); - + jest.mock('@clack/prompts', () => ({ intro: jest.fn(), - outro: jest.fn(), + outro: jest.fn() })); - - if (mockWarmup) mockWarmup.mockRestore(); - + mockExit = jest.spyOn(process, 'exit').mockImplementation(); mockConfig = getConfig() as ConfigType; - + mockConfig.OCO_AI_PROVIDER = 'gemini'; mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key'; mockConfig.OCO_MODEL = 'gemini-1.5-flash'; - - mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_GEMINI_API_KEY); - mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({ model: mockConfig.OCO_MODEL, }); + + mockGoogleGenerativeAi = new GoogleGenerativeAI( + mockConfig.OCO_GEMINI_API_KEY + ); + mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({ + model: mockConfig.OCO_MODEL + }); }); - + afterEach(() => { gemini = undefined as any; - }) - + }); + afterAll(() => { mockExit.mockRestore(); process.env = oldEnv; @@ -59,18 +64,12 @@ describe('Gemini', () => { expect(gemini).toBeDefined(); }); - it('should warmup correctly', () => { - mockWarmup = jest.spyOn(Gemini.prototype as any, 'warmup').mockImplementation(noop); - mockGemini(); - expect(gemini).toBeDefined(); - }); - it('should exit process 
if OCO_GEMINI_API_KEY is not set and command is not config', () => { process.env.OCO_GEMINI_API_KEY = undefined; process.env.OCO_AI_PROVIDER = 'gemini'; - + mockGemini(); - + expect(mockExit).toHaveBeenCalledWith(1); }); @@ -82,24 +81,26 @@ describe('Gemini', () => { expect(mockExit).toHaveBeenCalledWith(1); }); - + it('should generate commit message', async () => { - const mockGenerateContent = jest.fn().mockResolvedValue({ response: { text: () => 'generated content' } }); + const mockGenerateContent = jest + .fn() + .mockResolvedValue({ response: { text: () => 'generated content' } }); mockGenerativeModel.generateContent = mockGenerateContent; - - mockWarmup = jest.spyOn(Gemini.prototype as any, 'warmup').mockImplementation(noop); + mockGemini(); - - const messages: ChatCompletionRequestMessage[] = [ - { role: 'system', content: 'system message' }, - { role: 'assistant', content: 'assistant message' }, - ]; - - jest.spyOn(gemini, 'generateCommitMessage').mockImplementation(async () => 'generated content'); + + const messages: Array = + [ + { role: 'system', content: 'system message' }, + { role: 'assistant', content: 'assistant message' } + ]; + + jest + .spyOn(gemini, 'generateCommitMessage') + .mockImplementation(async () => 'generated content'); const result = await gemini.generateCommitMessage(messages); expect(result).toEqual('generated content'); - expect(mockWarmup).toHaveBeenCalled(); }); - -}); \ No newline at end of file +});