Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-01-12 23:28:16 -05:00)

Compare commits: 378_fix_ho ... v3.2.1 (2 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2859d4ebe3 |  |
|  | 306522e796 |  |
README.md (61 changed lines)
@@ -28,30 +28,19 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```

Alternatively run it via `npx opencommit` or `bunx opencommit`

MacOS may ask to run the command with `sudo` when installing a package globally.

2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys) or other supported LLM providers (we support them all). Make sure that you add your OpenAI payment details to your account, so the API works.

3. Set the key to OpenCommit config:

```sh
oco config set OCO_OPENAI_API_KEY=<your_api_key>
oco config set OCO_API_KEY=<your_api_key>
```

Your API key is stored locally in the `~/.opencommit` config file.

## Usage

You can call OpenCommit directly to generate a commit message for your staged changes:

```sh
git add <files...>
opencommit
```

You can also use the `oco` shortcut:
You can call OpenCommit with `oco` command to generate a commit message for your staged changes:

```sh
git add <files...>
@@ -70,22 +59,17 @@ You can also run it with local model through ollama:

```sh
git add <files...>
oco config set OCO_AI_PROVIDER='ollama'
oco config set OCO_AI_PROVIDER='ollama' OCO_MODEL='llama3:8b'
```

If you want to use a model other than mistral (default), you can do so by setting the `OCO_AI_PROVIDER` environment variable as follows:

```sh
oco config set OCO_AI_PROVIDER='ollama'
oco config set OCO_MODEL='llama3:8b'
```
Default model is `mistral`.

If you have ollama that is set up in docker/ on another machine with GPUs (not locally), you can change the default endpoint url.

You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
You can do so by setting the `OCO_API_URL` environment variable as follows:

```sh
oco config set OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat'
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
```

where 192.168.1.10 is example of endpoint URL, where you have ollama set up.
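Putting the pieces above together, a remote Ollama setup looks roughly like this sketch (the host, port, and model name are the example values used in this diff, not required ones):

```sh
# point OpenCommit at an Ollama instance running on another machine
oco config set OCO_AI_PROVIDER='ollama'
oco config set OCO_MODEL='llama3:8b'
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'

# then stage and commit as usual
git add <files...>
oco
```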
@@ -122,10 +106,11 @@ Create a `.env` file and add OpenCommit config variables there like this:

```env
...
OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
@@ -133,11 +118,9 @@ OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama>
...
```

This are not all the config options, but you get the point.
Global configs are same as local configs, but they are stored in the global `~/.opencommit` config file and set with `oco config set` command, e.g. `oco config set OCO_MODEL=gpt-4o`.
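As the examples in this diff suggest, several keys can also be set in a single `oco config set` invocation; a minimal sketch (the values are illustrative, not defaults):

```sh
# written to the global ~/.opencommit file
oco config set OCO_MODEL=gpt-4o OCO_EMOJI=true OCO_DESCRIPTION=true
```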
### Global config for all repos

@@ -189,26 +172,26 @@ or for as a cheaper option:
oco config set OCO_MODEL=gpt-3.5-turbo
```

### Switch to Azure OpenAI
### Switch to other LLM providers with a custom URL

By default OpenCommit uses [OpenAI](https://openai.com).

You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/) or Flowise or Ollama.

```sh
opencommit config set OCO_AI_PROVIDER=azure
```
oco config set OCO_AI_PROVIDER=azure OCO_API_KEY=<your_azure_api_key> OCO_API_URL=<your_azure_endpoint>

Of course need to set 'OCO_OPENAI_API_KEY'. And also need to set the
'OPENAI_BASE_PATH' for the endpoint and set the deployment name to
'model'.
oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_API_URL=<your_flowise_endpoint>

oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
```

### Locale configuration

To globally specify the language used to generate commit messages:

```sh
# de, German ,Deutsch
# de, German, Deutsch
oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch
@@ -224,12 +207,14 @@ All available languages are currently listed in the [i18n](https://github.com/di

### Push to git (gonna be deprecated)

A prompt to ushing to git is on by default but if you would like to turn it off just use:
A prompt for pushing to git is on by default but if you would like to turn it off just use:

```sh
oco config set OCO_GITPUSH=false
```

and it will exit right after commit is confirmed without asking if you would like to push to remote.

### Switch to `@commitlint`

OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages in respect with the local config.
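Switching modules is a single config change; a sketch using the `OCO_PROMPT_MODULE` key listed earlier in this diff:

```sh
oco config set OCO_PROMPT_MODULE=@commitlint
# switch back to the default generator
oco config set OCO_PROMPT_MODULE=conventional-commit
```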
@@ -404,7 +389,7 @@ jobs:
# set openAI api key in repo actions secrets,
# for openAI keys go to: https://platform.openai.com/account/api-keys
# for repo secret go to: <your_repo_url>/settings/secrets/actions
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
OCO_API_KEY: ${{ secrets.OCO_API_KEY }}

# customization
OCO_TOKENS_MAX_INPUT: 4096
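The `OCO_API_KEY` secret referenced in the workflow comments can be created in the repository settings UI or, assuming the GitHub CLI is installed, from a terminal (this command is an illustration, not part of the diff):

```sh
gh secret set OCO_API_KEY --body "<your_api_key>"
```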
out/cli.cjs (418 changed lines)
@@ -144,7 +144,7 @@ var require_windows = __commonJS({
"node_modules/isexe/windows.js"(exports, module2) {
module2.exports = isexe;
isexe.sync = sync;
var fs6 = require("fs");
var fs7 = require("fs");
function checkPathExt(path5, options) {
var pathext = options.pathExt !== void 0 ? options.pathExt : process.env.PATHEXT;
if (!pathext) {
@@ -169,12 +169,12 @@ var require_windows = __commonJS({
return checkPathExt(path5, options);
}
function isexe(path5, options, cb) {
fs6.stat(path5, function(er2, stat) {
fs7.stat(path5, function(er2, stat) {
cb(er2, er2 ? false : checkStat(stat, path5, options));
});
}
function sync(path5, options) {
return checkStat(fs6.statSync(path5), path5, options);
return checkStat(fs7.statSync(path5), path5, options);
}
}
});
@@ -184,14 +184,14 @@ var require_mode = __commonJS({
"node_modules/isexe/mode.js"(exports, module2) {
module2.exports = isexe;
isexe.sync = sync;
var fs6 = require("fs");
var fs7 = require("fs");
function isexe(path5, options, cb) {
fs6.stat(path5, function(er2, stat) {
fs7.stat(path5, function(er2, stat) {
cb(er2, er2 ? false : checkStat(stat, options));
});
}
function sync(path5, options) {
return checkStat(fs6.statSync(path5), options);
return checkStat(fs7.statSync(path5), options);
}
function checkStat(stat, options) {
return stat.isFile() && checkMode(stat, options);
@@ -215,7 +215,7 @@ var require_mode = __commonJS({
// node_modules/isexe/index.js
var require_isexe = __commonJS({
"node_modules/isexe/index.js"(exports, module2) {
var fs6 = require("fs");
var fs7 = require("fs");
var core;
if (process.platform === "win32" || global.TESTING_WINDOWS) {
core = require_windows();
@@ -477,16 +477,16 @@ var require_shebang_command = __commonJS({
var require_readShebang = __commonJS({
"node_modules/cross-spawn/lib/util/readShebang.js"(exports, module2) {
"use strict";
var fs6 = require("fs");
var fs7 = require("fs");
var shebangCommand = require_shebang_command();
function readShebang(command) {
const size = 150;
const buffer = Buffer.alloc(size);
let fd;
try {
fd = fs6.openSync(command, "r");
fs6.readSync(fd, buffer, 0, size, 0);
fs6.closeSync(fd);
fd = fs7.openSync(command, "r");
fs7.readSync(fd, buffer, 0, size, 0);
fs7.closeSync(fd);
} catch (e3) {
}
return shebangCommand(buffer.toString());
@@ -1040,7 +1040,7 @@ var require_package = __commonJS({
// node_modules/dotenv/lib/main.js
var require_main = __commonJS({
"node_modules/dotenv/lib/main.js"(exports, module2) {
var fs6 = require("fs");
var fs7 = require("fs");
var path5 = require("path");
var os4 = require("os");
var crypto2 = require("crypto");
@@ -1147,7 +1147,7 @@ var require_main = __commonJS({
if (options && options.path && options.path.length > 0) {
if (Array.isArray(options.path)) {
for (const filepath of options.path) {
if (fs6.existsSync(filepath)) {
if (fs7.existsSync(filepath)) {
possibleVaultPath = filepath.endsWith(".vault") ? filepath : `${filepath}.vault`;
}
}
@@ -1157,7 +1157,7 @@ var require_main = __commonJS({
} else {
possibleVaultPath = path5.resolve(process.cwd(), ".env.vault");
}
if (fs6.existsSync(possibleVaultPath)) {
if (fs7.existsSync(possibleVaultPath)) {
return possibleVaultPath;
}
return null;
@@ -1201,7 +1201,7 @@ var require_main = __commonJS({
const parsedAll = {};
for (const path6 of optionPaths) {
try {
const parsed = DotenvModule.parse(fs6.readFileSync(path6, { encoding }));
const parsed = DotenvModule.parse(fs7.readFileSync(path6, { encoding }));
DotenvModule.populate(parsedAll, parsed, options);
} catch (e3) {
if (debug3) {
@@ -21024,7 +21024,7 @@ var require_form_data = __commonJS({
var http3 = require("http");
var https3 = require("https");
var parseUrl = require("url").parse;
var fs6 = require("fs");
var fs7 = require("fs");
var Stream3 = require("stream").Stream;
var mime = require_mime_types();
var asynckit = require_asynckit();
@@ -21089,7 +21089,7 @@ var require_form_data = __commonJS({
if (value.end != void 0 && value.end != Infinity && value.start != void 0) {
callback(null, value.end + 1 - (value.start ? value.start : 0));
} else {
fs6.stat(value.path, function(err, stat) {
fs7.stat(value.path, function(err, stat) {
var fileSize;
if (err) {
callback(err);
@@ -22849,7 +22849,7 @@ var require_tiktoken = __commonJS({
var imports = {};
imports["./tiktoken_bg.js"] = wasm;
var path5 = require("path");
var fs6 = require("fs");
var fs7 = require("fs");
var candidates = __dirname.split(path5.sep).reduce((memo, _7, index, array) => {
const prefix = array.slice(0, index + 1).join(path5.sep) + path5.sep;
if (!prefix.includes("node_modules" + path5.sep)) {
@@ -22869,7 +22869,7 @@ var require_tiktoken = __commonJS({
var bytes = null;
for (const candidate of candidates) {
try {
bytes = fs6.readFileSync(candidate);
bytes = fs7.readFileSync(candidate);
break;
} catch {
}
@@ -27419,7 +27419,6 @@ var package_default = {
"@google/generative-ai": "^0.11.4",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
ai: "^2.2.14",
axios: "^1.3.4",
chalk: "^5.2.0",
cleye: "^1.3.2",
@@ -27428,7 +27427,7 @@ var package_default = {
ignore: "^5.2.4",
ini: "^3.0.1",
inquirer: "^9.1.4",
openai: "^4.56.0"
openai: "^4.57.0"
}
};

@@ -29877,33 +29876,6 @@ function getI18nLocal(value) {
}

// src/commands/config.ts
var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
CONFIG_KEYS2["OCO_GEMINI_API_KEY"] = "OCO_GEMINI_API_KEY";
CONFIG_KEYS2["OCO_GEMINI_BASE_PATH"] = "OCO_GEMINI_BASE_PATH";
CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
CONFIG_KEYS2["OCO_WHY"] = "OCO_WHY";
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
CONFIG_KEYS2["OCO_TEST_MOCK_TYPE"] = "OCO_TEST_MOCK_TYPE";
CONFIG_KEYS2["OCO_API_URL"] = "OCO_API_URL";
CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_OLLAMA_API_URL";
CONFIG_KEYS2["OCO_FLOWISE_ENDPOINT"] = "OCO_FLOWISE_ENDPOINT";
CONFIG_KEYS2["OCO_FLOWISE_API_KEY"] = "OCO_FLOWISE_API_KEY";
return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
var MODEL_LIST = {
openai: [
"gpt-4o-mini",
@@ -29969,56 +29941,18 @@ var validateConfig = (key, condition, validationMessage) => {
}
};
var configValidators = {
["OCO_OPENAI_API_KEY" /* OCO_OPENAI_API_KEY */](value, config7 = {}) {
["OCO_API_KEY" /* OCO_API_KEY */](value, config7 = {}) {
if (config7.OCO_AI_PROVIDER !== "openai")
return value;
validateConfig(
"OCO_OPENAI_API_KEY",
"OCO_API_KEY",
typeof value === "string" && value.length > 0,
"Empty value is not allowed"
);
validateConfig(
"OCO_OPENAI_API_KEY",
"OCO_API_KEY",
value,
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
);
return value;
},
["OCO_AZURE_API_KEY" /* OCO_AZURE_API_KEY */](value, config7 = {}) {
if (config7.OCO_AI_PROVIDER !== "azure")
return value;
validateConfig(
"OCO_AZURE_API_KEY",
!!value,
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
);
return value;
},
["OCO_GEMINI_API_KEY" /* OCO_GEMINI_API_KEY */](value, config7 = {}) {
if (config7.OCO_AI_PROVIDER !== "gemini")
return value;
validateConfig(
"OCO_GEMINI_API_KEY",
value || config7.OCO_GEMINI_API_KEY || config7.OCO_AI_PROVIDER === "test",
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
);
return value;
},
["OCO_ANTHROPIC_API_KEY" /* OCO_ANTHROPIC_API_KEY */](value, config7 = {}) {
if (config7.OCO_AI_PROVIDER !== "anthropic")
return value;
validateConfig(
"ANTHROPIC_API_KEY",
!!value,
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
);
return value;
},
["OCO_FLOWISE_API_KEY" /* OCO_FLOWISE_API_KEY */](value, config7 = {}) {
validateConfig(
"OCO_FLOWISE_API_KEY" /* OCO_FLOWISE_API_KEY */,
value || config7.OCO_AI_PROVIDER !== "flowise",
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
);
return value;
},
@@ -30065,11 +29999,11 @@ var configValidators = {
);
return getI18nLocal(value);
},
["OCO_OPENAI_BASE_PATH" /* OCO_OPENAI_BASE_PATH */](value) {
["OCO_API_URL" /* OCO_API_URL */](value) {
validateConfig(
"OCO_OPENAI_BASE_PATH" /* OCO_OPENAI_BASE_PATH */,
"OCO_API_URL" /* OCO_API_URL */,
typeof value === "string",
"Must be string"
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
);
return value;
},
@@ -30131,22 +30065,6 @@ var configValidators = {
);
return value;
},
["OCO_AZURE_ENDPOINT" /* OCO_AZURE_ENDPOINT */](value) {
validateConfig(
"OCO_AZURE_ENDPOINT" /* OCO_AZURE_ENDPOINT */,
value.includes("openai.azure.com"),
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
["OCO_FLOWISE_ENDPOINT" /* OCO_FLOWISE_ENDPOINT */](value) {
validateConfig(
"OCO_FLOWISE_ENDPOINT" /* OCO_FLOWISE_ENDPOINT */,
typeof value === "string" && value.includes(":"),
"Value must be string and should include both I.P. and port number"
);
return value;
},
["OCO_TEST_MOCK_TYPE" /* OCO_TEST_MOCK_TYPE */](value) {
validateConfig(
"OCO_TEST_MOCK_TYPE" /* OCO_TEST_MOCK_TYPE */,
@@ -30157,15 +30075,25 @@ var configValidators = {
);
return value;
},
["OCO_OLLAMA_API_URL" /* OCO_OLLAMA_API_URL */](value) {
["OCO_WHY" /* OCO_WHY */](value) {
validateConfig(
"OCO_OLLAMA_API_URL" /* OCO_OLLAMA_API_URL */,
typeof value === "string" && value.startsWith("http"),
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
"OCO_WHY" /* OCO_WHY */,
typeof value === "boolean",
"Must be true or false"
);
return value;
}
};
var OCO_AI_PROVIDER_ENUM = /* @__PURE__ */ ((OCO_AI_PROVIDER_ENUM2) => {
OCO_AI_PROVIDER_ENUM2["OLLAMA"] = "ollama";
OCO_AI_PROVIDER_ENUM2["OPENAI"] = "openai";
OCO_AI_PROVIDER_ENUM2["ANTHROPIC"] = "anthropic";
OCO_AI_PROVIDER_ENUM2["GEMINI"] = "gemini";
OCO_AI_PROVIDER_ENUM2["AZURE"] = "azure";
OCO_AI_PROVIDER_ENUM2["TEST"] = "test";
OCO_AI_PROVIDER_ENUM2["FLOWISE"] = "flowise";
return OCO_AI_PROVIDER_ENUM2;
})(OCO_AI_PROVIDER_ENUM || {});
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
var defaultEnvPath = (0, import_path.resolve)(process.cwd(), ".env");
var DEFAULT_CONFIG = {
@@ -30188,7 +30116,7 @@ var initGlobalConfig = (configPath = defaultConfigPath) => {
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(DEFAULT_CONFIG), "utf8");
return DEFAULT_CONFIG;
};
var parseEnvVarValue = (value) => {
var parseConfigVarValue = (value) => {
try {
return JSON.parse(value);
} catch (error) {
@@ -30199,32 +30127,32 @@ var getEnvConfig = (envPath) => {
dotenv.config({ path: envPath });
return {
OCO_MODEL: process.env.OCO_MODEL,
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
OCO_API_URL: process.env.OCO_API_URL,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER,
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue(
process.env.OCO_TOKENS_MAX_OUTPUT
),
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER,
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH)
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH)
};
};
var getGlobalConfig = (configPath) => {
var setGlobalConfig = (config7, configPath = defaultConfigPath) => {
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config7), "utf8");
};
var getIsGlobalConfigFileExist = (configPath = defaultConfigPath) => {
return (0, import_fs.existsSync)(configPath);
};
var getGlobalConfig = (configPath = defaultConfigPath) => {
let globalConfig;
const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath);
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
if (!isGlobalConfigFileExist)
globalConfig = initGlobalConfig(configPath);
else {
@@ -30233,10 +30161,13 @@ var getGlobalConfig = (configPath) => {
}
return globalConfig;
};
var mergeConfigs = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
return acc;
}, {});
var mergeConfigs = (main, fallback) => {
const allKeys = /* @__PURE__ */ new Set([...Object.keys(main), ...Object.keys(fallback)]);
return Array.from(allKeys).reduce((acc, key) => {
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
return acc;
}, {});
};
var getConfig = ({
envPath = defaultEnvPath,
globalPath = defaultConfigPath
@@ -30250,6 +30181,7 @@ var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
const config7 = getConfig({
globalPath: globalConfigPath
});
const configToSet = {};
for (let [key, value] of keyValues) {
if (!configValidators.hasOwnProperty(key)) {
const supportedKeys = Object.keys(configValidators).join("\n");
@@ -30263,7 +30195,10 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
}
let parsedConfigValue;
try {
parsedConfigValue = JSON.parse(value);
if (typeof value === "string")
parsedConfigValue = JSON.parse(value);
else
parsedConfigValue = value;
} catch (error) {
parsedConfigValue = value;
}
@@ -30271,9 +30206,9 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
parsedConfigValue,
config7
);
config7[key] = validValue;
configToSet[key] = validValue;
}
(0, import_fs.writeFileSync)(globalConfigPath, (0, import_ini.stringify)(config7), "utf8");
setGlobalConfig(mergeConfigs(configToSet, config7), globalConfigPath);
ce(`${source_default.green("\u2714")} config successfully set`);
};
var configCommand = G3(
@@ -39359,7 +39294,7 @@ var AzureEngine = class {
};

// src/engine/flowise.ts
var FlowiseAi = class {
var FlowiseEngine = class {
constructor(config7) {
this.config = config7;
this.client = axios_default.create({
@@ -40154,7 +40089,7 @@ var GoogleGenerativeAI = class {
};

// src/engine/gemini.ts
var Gemini = class {
var GeminiEngine = class {
constructor(config7) {
this.client = new GoogleGenerativeAI(config7.apiKey);
this.config = config7;
@@ -40212,7 +40147,7 @@ var Gemini = class {
};

// src/engine/ollama.ts
var OllamaAi = class {
var OllamaEngine = class {
constructor(config7) {
this.config = config7;
this.client = axios_default.create({
@@ -44547,44 +44482,24 @@ function getEngine() {
model: config7.OCO_MODEL,
maxTokensOutput: config7.OCO_TOKENS_MAX_OUTPUT,
maxTokensInput: config7.OCO_TOKENS_MAX_INPUT,
baseURL: config7.OCO_OPENAI_BASE_PATH
baseURL: config7.OCO_API_URL,
apiKey: config7.OCO_API_KEY
};
switch (provider) {
case "ollama" /* OLLAMA */:
return new OllamaAi({
...DEFAULT_CONFIG2,
apiKey: "",
baseURL: config7.OCO_OLLAMA_API_URL
});
return new OllamaEngine(DEFAULT_CONFIG2);
case "anthropic" /* ANTHROPIC */:
return new AnthropicEngine({
...DEFAULT_CONFIG2,
apiKey: config7.OCO_ANTHROPIC_API_KEY
});
return new AnthropicEngine(DEFAULT_CONFIG2);
case "test" /* TEST */:
return new TestAi(config7.OCO_TEST_MOCK_TYPE);
case "gemini" /* GEMINI */:
return new Gemini({
...DEFAULT_CONFIG2,
apiKey: config7.OCO_GEMINI_API_KEY,
baseURL: config7.OCO_GEMINI_BASE_PATH
});
return new GeminiEngine(DEFAULT_CONFIG2);
case "azure" /* AZURE */:
return new AzureEngine({
...DEFAULT_CONFIG2,
apiKey: config7.OCO_AZURE_API_KEY
});
return new AzureEngine(DEFAULT_CONFIG2);
case "flowise" /* FLOWISE */:
return new FlowiseAi({
...DEFAULT_CONFIG2,
baseURL: config7.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG2.baseURL,
apiKey: config7.OCO_FLOWISE_API_KEY
});
return new FlowiseEngine(DEFAULT_CONFIG2);
default:
return new OpenAiEngine({
...DEFAULT_CONFIG2,
apiKey: config7.OCO_OPENAI_API_KEY
});
return new OpenAiEngine(DEFAULT_CONFIG2);
}
}

@@ -45068,8 +44983,8 @@ function mergeDiffs(arr, maxStringLength) {

// src/generateCommitMessageFromGitDiff.ts
var config5 = getConfig();
var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */;
var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */;
var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT;
var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT;
var generateCommitMessageChatCompletionPrompt = async (diff, fullGitMojiSpec) => {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
@@ -45401,13 +45316,13 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
const pushSpinner = le();
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
const { stdout: stdout2 } = await execa("git", ["push", selectedRemote]);
if (stdout2)
ce(stdout2);
pushSpinner.stop(
`${source_default.green(
"\u2714"
)} Successfully pushed all commits to ${selectedRemote}`
)} successfully pushed all commits to ${selectedRemote}`
);
if (stdout2)
ce(stdout2);
}
} else {
const regenerateMessage = await Q3({
@@ -45635,9 +45550,9 @@ var prepareCommitMessageHook = async (isStageAllFlag = false) => {
return;
ae("opencommit");
const config7 = getConfig();
if (!config7.OCO_OPENAI_API_KEY && !config7.OCO_ANTHROPIC_API_KEY && !config7.OCO_AZURE_API_KEY) {
if (!config7.OCO_API_KEY) {
ce(
"No OCO_OPENAI_API_KEY or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key via `oco config set <key>=<value>, e.g. `oco config set OCO_OPENAI_API_KEY=<value>`. For more info see https://github.com/di-sukharev/opencommit"
"No OCO_API_KEY is set. Set your key via `oco config set OCO_API_KEY=<value>. For more info see https://github.com/di-sukharev/opencommit"
);
return;
}
@@ -45688,6 +45603,152 @@ Current version: ${currentVersion}. Latest version: ${latestVersion}.
}
};

// src/migrations/_run.ts
var import_fs5 = __toESM(require("fs"), 1);
var import_os2 = require("os");
var import_path5 = require("path");

// src/migrations/00_use_single_api_key_and_url.ts
function use_single_api_key_and_url_default() {
const config7 = getConfig({ setDefaultValues: false });
const aiProvider = config7.OCO_AI_PROVIDER;
let apiKey;
let apiUrl;
if (aiProvider === "ollama" /* OLLAMA */) {
apiKey = config7["OCO_OLLAMA_API_KEY"];
apiUrl = config7["OCO_OLLAMA_API_URL"];
} else if (aiProvider === "anthropic" /* ANTHROPIC */) {
apiKey = config7["OCO_ANTHROPIC_API_KEY"];
apiUrl = config7["OCO_ANTHROPIC_BASE_PATH"];
} else if (aiProvider === "openai" /* OPENAI */) {
apiKey = config7["OCO_OPENAI_API_KEY"];
apiUrl = config7["OCO_OPENAI_BASE_PATH"];
} else if (aiProvider === "azure" /* AZURE */) {
apiKey = config7["OCO_AZURE_API_KEY"];
apiUrl = config7["OCO_AZURE_ENDPOINT"];
} else if (aiProvider === "gemini" /* GEMINI */) {
apiKey = config7["OCO_GEMINI_API_KEY"];
apiUrl = config7["OCO_GEMINI_BASE_PATH"];
} else if (aiProvider === "flowise" /* FLOWISE */) {
apiKey = config7["OCO_FLOWISE_API_KEY"];
apiUrl = config7["OCO_FLOWISE_ENDPOINT"];
} else {
throw new Error(
`Migration failed, set AI provider first. Run "oco config set OCO_AI_PROVIDER=<provider>", where <provider> is one of: ${Object.values(
OCO_AI_PROVIDER_ENUM
).join(", ")}`
);
}
if (apiKey)
setConfig([["OCO_API_KEY" /* OCO_API_KEY */, apiKey]]);
if (apiUrl)
setConfig([["OCO_API_URL" /* OCO_API_URL */, apiUrl]]);
}

// src/migrations/01_remove_obsolete_config_keys_from_global_file.ts
function remove_obsolete_config_keys_from_global_file_default() {
const obsoleteKeys = [
"OCO_OLLAMA_API_KEY",
"OCO_OLLAMA_API_URL",
"OCO_ANTHROPIC_API_KEY",
"OCO_ANTHROPIC_BASE_PATH",
"OCO_OPENAI_API_KEY",
"OCO_OPENAI_BASE_PATH",
"OCO_AZURE_API_KEY",
"OCO_AZURE_ENDPOINT",
"OCO_GEMINI_API_KEY",
"OCO_GEMINI_BASE_PATH",
"OCO_FLOWISE_API_KEY",
"OCO_FLOWISE_ENDPOINT"
];
const globalConfig = getGlobalConfig();
const configToOverride = { ...globalConfig };
for (const key of obsoleteKeys)
delete configToOverride[key];
setGlobalConfig(configToOverride);
}

// src/migrations/02_set_missing_default_values.ts
function set_missing_default_values_default() {
const setDefaultConfigValues = (config7) => {
const entriesToSet = [];
for (const entry of Object.entries(DEFAULT_CONFIG)) {
const [key, _value] = entry;
if (config7[key] === "undefined")
entriesToSet.push(entry);
}
if (entriesToSet.length > 0)
setConfig(entriesToSet);
};
setDefaultConfigValues(getGlobalConfig());
}

// src/migrations/_migrations.ts
var migrations = [
{
name: "00_use_single_api_key_and_url",
run: use_single_api_key_and_url_default
},
{
name: "01_remove_obsolete_config_keys_from_global_file",
run: remove_obsolete_config_keys_from_global_file_default
},
{
name: "02_set_missing_default_values",
run: set_missing_default_values_default
}
];

// src/migrations/_run.ts
var migrationsFile = (0, import_path5.join)((0, import_os2.homedir)(), ".opencommit_migrations");
var getCompletedMigrations = () => {
if (!import_fs5.default.existsSync(migrationsFile)) {
return [];
}
const data = import_fs5.default.readFileSync(migrationsFile, "utf-8");
return data ? JSON.parse(data) : [];
};
var saveCompletedMigration = (migrationName) => {
const completedMigrations = getCompletedMigrations();
completedMigrations.push(migrationName);
import_fs5.default.writeFileSync(
migrationsFile,
JSON.stringify(completedMigrations, null, 2)
);
};
var runMigrations = async () => {
if (!getIsGlobalConfigFileExist())
return;
const config7 = getConfig();
if (config7.OCO_AI_PROVIDER === "test" /* TEST */)
return;
const completedMigrations = getCompletedMigrations();
let isMigrated = false;
for (const migration of migrations) {
if (!completedMigrations.includes(migration.name)) {
try {
console.log("Applying migration", migration.name);
migration.run();
console.log("Migration applied successfully", migration.name);
saveCompletedMigration(migration.name);
} catch (error) {
ce(
`${source_default.red("Failed to apply migration")} ${migration.name}: ${error}`
);
}
isMigrated = true;
}
}
if (isMigrated) {
ce(
`${source_default.green(
"\u2714"
)} Migrations to your config were applied successfully. Please rerun.`
);
process.exit(0);
}
};

// src/cli.ts
var extraArgs = process.argv.slice(2);
Z2(
@@ -45708,6 +45769,7 @@ Z2(
help: { description: package_default.description }
},
async ({ flags }) => {
await runMigrations();
await checkIsLatestVersion();
if (await isHookCalled()) {
prepareCommitMessageHook();

@@ -48689,33 +48689,6 @@ function getI18nLocal(value) {
}

// src/commands/config.ts
var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
CONFIG_KEYS2["OCO_GEMINI_API_KEY"] = "OCO_GEMINI_API_KEY";
CONFIG_KEYS2["OCO_GEMINI_BASE_PATH"] = "OCO_GEMINI_BASE_PATH";
CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
CONFIG_KEYS2["OCO_WHY"] = "OCO_WHY";
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
CONFIG_KEYS2["OCO_TEST_MOCK_TYPE"] = "OCO_TEST_MOCK_TYPE";
CONFIG_KEYS2["OCO_API_URL"] = "OCO_API_URL";
CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_OLLAMA_API_URL";
CONFIG_KEYS2["OCO_FLOWISE_ENDPOINT"] = "OCO_FLOWISE_ENDPOINT";
CONFIG_KEYS2["OCO_FLOWISE_API_KEY"] = "OCO_FLOWISE_API_KEY";
return CONFIG_KEYS2;
})(CONFIG_KEYS || {});
var MODEL_LIST = {
openai: [
"gpt-4o-mini",
@@ -48781,56 +48754,18 @@ var validateConfig = (key, condition, validationMessage) => {
}
};
var configValidators = {
["OCO_OPENAI_API_KEY" /* OCO_OPENAI_API_KEY */](value, config6 = {}) {
["OCO_API_KEY" /* OCO_API_KEY */](value, config6 = {}) {
if (config6.OCO_AI_PROVIDER !== "openai")
return value;
validateConfig(
"OCO_OPENAI_API_KEY",
"OCO_API_KEY",
typeof value === "string" && value.length > 0,
"Empty value is not allowed"
);
validateConfig(
"OCO_OPENAI_API_KEY",
"OCO_API_KEY",
value,
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
);
return value;
},
["OCO_AZURE_API_KEY" /* OCO_AZURE_API_KEY */](value, config6 = {}) {
if (config6.OCO_AI_PROVIDER !== "azure")
return value;
validateConfig(
"OCO_AZURE_API_KEY",
!!value,
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
);
return value;
},
["OCO_GEMINI_API_KEY" /* OCO_GEMINI_API_KEY */](value, config6 = {}) {
if (config6.OCO_AI_PROVIDER !== "gemini")
return value;
validateConfig(
"OCO_GEMINI_API_KEY",
value || config6.OCO_GEMINI_API_KEY || config6.OCO_AI_PROVIDER === "test",
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
);
return value;
},
["OCO_ANTHROPIC_API_KEY" /* OCO_ANTHROPIC_API_KEY */](value, config6 = {}) {
if (config6.OCO_AI_PROVIDER !== "anthropic")
return value;
validateConfig(
"ANTHROPIC_API_KEY",
!!value,
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
);
return value;
},
["OCO_FLOWISE_API_KEY" /* OCO_FLOWISE_API_KEY */](value, config6 = {}) {
validateConfig(
"OCO_FLOWISE_API_KEY" /* OCO_FLOWISE_API_KEY */,
value || config6.OCO_AI_PROVIDER !== "flowise",
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
);
return value;
},
@@ -48877,11 +48812,11 @@ var configValidators = {
);
return getI18nLocal(value);
},
["OCO_OPENAI_BASE_PATH" /* OCO_OPENAI_BASE_PATH */](value) {
["OCO_API_URL" /* OCO_API_URL */](value) {
validateConfig(
"OCO_OPENAI_BASE_PATH" /* OCO_OPENAI_BASE_PATH */,
"OCO_API_URL" /* OCO_API_URL */,
typeof value === "string",
"Must be string"
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
);
return value;
},
@@ -48943,22 +48878,6 @@ var configValidators = {
);
return value;
},
["OCO_AZURE_ENDPOINT" /* OCO_AZURE_ENDPOINT */](value) {
validateConfig(
"OCO_AZURE_ENDPOINT" /* OCO_AZURE_ENDPOINT */,
value.includes("openai.azure.com"),
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
["OCO_FLOWISE_ENDPOINT" /* OCO_FLOWISE_ENDPOINT */](value) {
validateConfig(
"OCO_FLOWISE_ENDPOINT" /* OCO_FLOWISE_ENDPOINT */,
typeof value === "string" && value.includes(":"),
"Value must be string and should include both I.P. and port number"
);
return value;
},
["OCO_TEST_MOCK_TYPE" /* OCO_TEST_MOCK_TYPE */](value) {
validateConfig(
"OCO_TEST_MOCK_TYPE" /* OCO_TEST_MOCK_TYPE */,
@@ -48969,11 +48888,11 @@ var configValidators = {
);
return value;
},
["OCO_OLLAMA_API_URL" /* OCO_OLLAMA_API_URL */](value) {
["OCO_WHY" /* OCO_WHY */](value) {
validateConfig(
"OCO_OLLAMA_API_URL" /* OCO_OLLAMA_API_URL */,
typeof value === "string" && value.startsWith("http"),
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
"OCO_WHY" /* OCO_WHY */,
typeof value === "boolean",
"Must be true or false"
);
return value;
}
@@ -49000,7 +48919,7 @@ var initGlobalConfig = (configPath = defaultConfigPath) => {
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(DEFAULT_CONFIG), "utf8");
return DEFAULT_CONFIG;
};
var parseEnvVarValue = (value) => {
var parseConfigVarValue = (value) => {
try {
return JSON.parse(value);
} catch (error) {
@@ -49011,32 +48930,32 @@ var getEnvConfig = (envPath) => {
dotenv.config({ path: envPath });
return {
OCO_MODEL: process.env.OCO_MODEL,
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
OCO_API_URL: process.env.OCO_API_URL,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER,
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue(
process.env.OCO_TOKENS_MAX_OUTPUT
),
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER,
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH)
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH)
};
};
var getGlobalConfig = (configPath) => {
var setGlobalConfig = (config6, configPath = defaultConfigPath) => {
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config6), "utf8");
};
var getIsGlobalConfigFileExist = (configPath = defaultConfigPath) => {
return (0, import_fs.existsSync)(configPath);
};
var getGlobalConfig = (configPath = defaultConfigPath) => {
let globalConfig;
const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath);
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
if (!isGlobalConfigFileExist)
globalConfig = initGlobalConfig(configPath);
else {
@@ -49045,10 +48964,13 @@ var getGlobalConfig = (configPath) => {
}
return globalConfig;
};
var mergeConfigs = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
return acc;
}, {});
var mergeConfigs = (main, fallback) => {
const allKeys = /* @__PURE__ */ new Set([...Object.keys(main), ...Object.keys(fallback)]);
return Array.from(allKeys).reduce((acc, key) => {
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
return acc;
}, {});
};
var getConfig = ({
envPath = defaultEnvPath,
globalPath = defaultConfigPath
@@ -49062,6 +48984,7 @@ var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
const config6 = getConfig({
globalPath: globalConfigPath
});
const configToSet = {};
for (let [key, value] of keyValues) {
if (!configValidators.hasOwnProperty(key)) {
const supportedKeys = Object.keys(configValidators).join("\n");
@@ -49075,7 +48998,10 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
}
let parsedConfigValue;
try {
parsedConfigValue = JSON.parse(value);
if (typeof value === "string")
parsedConfigValue = JSON.parse(value);
else
parsedConfigValue = value;
} catch (error) {
parsedConfigValue = value;
}
@@ -49083,9 +49009,9 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
parsedConfigValue,
config6
);
config6[key] = validValue;
configToSet[key] = validValue;
}
(0, import_fs.writeFileSync)(globalConfigPath, (0, import_ini.stringify)(config6), "utf8");
setGlobalConfig(mergeConfigs(configToSet, config6), globalConfigPath);
ce(`${source_default.green("\u2714")} config successfully set`);
};
var configCommand = G2(
@@ -58171,7 +58097,7 @@ var AzureEngine = class {
};

// src/engine/flowise.ts
var FlowiseAi = class {
var FlowiseEngine = class {
constructor(config6) {
this.config = config6;
this.client = axios_default.create({
@@ -58966,7 +58892,7 @@ var GoogleGenerativeAI = class {
};

// src/engine/gemini.ts
var Gemini = class {
var GeminiEngine = class {
constructor(config6) {
this.client = new GoogleGenerativeAI(config6.apiKey);
this.config = config6;
@@ -59024,7 +58950,7 @@ var Gemini = class {
};

// src/engine/ollama.ts
var OllamaAi = class {
var OllamaEngine = class {
constructor(config6) {
this.config = config6;
this.client = axios_default.create({
@@ -63359,44 +63285,24 @@ function getEngine() {
model: config6.OCO_MODEL,
maxTokensOutput: config6.OCO_TOKENS_MAX_OUTPUT,
maxTokensInput: config6.OCO_TOKENS_MAX_INPUT,
baseURL: config6.OCO_OPENAI_BASE_PATH
baseURL: config6.OCO_API_URL,
apiKey: config6.OCO_API_KEY
};
switch (provider) {
case "ollama" /* OLLAMA */:
return new OllamaAi({
...DEFAULT_CONFIG2,
apiKey: "",
baseURL: config6.OCO_OLLAMA_API_URL
});
return new OllamaEngine(DEFAULT_CONFIG2);
case "anthropic" /* ANTHROPIC */:
return new AnthropicEngine({
...DEFAULT_CONFIG2,
apiKey: config6.OCO_ANTHROPIC_API_KEY
});
return new AnthropicEngine(DEFAULT_CONFIG2);
case "test" /* TEST */:
return new TestAi(config6.OCO_TEST_MOCK_TYPE);
case "gemini" /* GEMINI */:
return new Gemini({
...DEFAULT_CONFIG2,
apiKey: config6.OCO_GEMINI_API_KEY,
baseURL: config6.OCO_GEMINI_BASE_PATH
});
return new GeminiEngine(DEFAULT_CONFIG2);
case "azure" /* AZURE */:
return new AzureEngine({
...DEFAULT_CONFIG2,
apiKey: config6.OCO_AZURE_API_KEY
});
return new AzureEngine(DEFAULT_CONFIG2);
case "flowise" /* FLOWISE */:
return new FlowiseAi({
...DEFAULT_CONFIG2,
baseURL: config6.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG2.baseURL,
apiKey: config6.OCO_FLOWISE_API_KEY
});
return new FlowiseEngine(DEFAULT_CONFIG2);
default:
return new OpenAiEngine({
...DEFAULT_CONFIG2,
apiKey: config6.OCO_OPENAI_API_KEY
});
return new OpenAiEngine(DEFAULT_CONFIG2);
}
}

@@ -63880,8 +63786,8 @@ function mergeDiffs(arr, maxStringLength) {

// src/generateCommitMessageFromGitDiff.ts
var config5 = getConfig();
var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT || 40960 /* DEFAULT_MAX_TOKENS_INPUT */;
var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT || 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */;
var MAX_TOKENS_INPUT = config5.OCO_TOKENS_MAX_INPUT;
var MAX_TOKENS_OUTPUT = config5.OCO_TOKENS_MAX_OUTPUT;
var generateCommitMessageChatCompletionPrompt = async (diff, fullGitMojiSpec) => {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
package-lock.json (63 changed lines, generated)
@@ -1,12 +1,12 @@
{
"name": "opencommit",
"version": "3.1.2",
"version": "3.2.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "opencommit",
"version": "3.1.2",
"version": "3.2.1",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",
@@ -28,7 +28,7 @@
"ignore": "^5.2.4",
"ini": "^3.0.1",
"inquirer": "^9.1.4",
"openai": "^4.56.0"
"openai": "^4.57.0"
},
"bin": {
"oco": "out/cli.cjs",
@@ -2098,6 +2098,11 @@
"form-data": "^4.0.0"
}
},
"node_modules/@types/qs": {
"version": "6.9.15",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz",
"integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg=="
},
"node_modules/@types/semver": {
"version": "7.5.8",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
@@ -7219,6 +7224,17 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/object-inspect": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
"integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -7242,17 +7258,19 @@
}
},
"node_modules/openai": {
"version": "4.56.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.56.0.tgz",
"integrity": "sha512-zcag97+3bG890MNNa0DQD9dGmmTWL8unJdNkulZzWRXrl+QeD+YkBI4H58rJcwErxqGK6a0jVPZ4ReJjhDGcmw==",
"version": "4.57.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.57.0.tgz",
"integrity": "sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"@types/qs": "^6.9.7",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7"
"node-fetch": "^2.6.7",
"qs": "^6.10.3"
},
"bin": {
"openai": "bin/cli"
@@ -7704,6 +7722,20 @@
}
]
},
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"dependencies": {
"side-channel": "^1.0.6"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -8050,6 +8082,23 @@
"node": ">=8"
}
},
"node_modules/side-channel": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"dependencies": {
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
package.json (10 changed lines)
@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.1.2",
"version": "3.2.1",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -46,8 +46,9 @@
"dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
"build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
"deploy": "npm run build:push && git push --tags && npm publish --tag latest",
"deploy:patch": "npm version patch && npm run deploy",
"deploy": "npm publish --tag latest",
"deploy:build": "npm run build:push && git push --tags && npm run deploy",
"deploy:patch": "npm version patch && npm run deploy:build",
"lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src",
"test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
@@ -88,7 +89,6 @@
"@google/generative-ai": "^0.11.4",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",
@@ -97,6 +97,6 @@
"ignore": "^5.2.4",
"ini": "^3.0.1",
"inquirer": "^9.1.4",
"openai": "^4.56.0"
"openai": "^4.57.0"
}
}
@@ -9,6 +9,7 @@ import { configCommand } from './commands/config';
|
||||
import { hookCommand, isHookCalled } from './commands/githook.js';
|
||||
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
|
||||
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
|
||||
import { runMigrations } from './migrations/_run.js';
|
||||
|
||||
const extraArgs = process.argv.slice(2);
|
||||
|
||||
@@ -30,6 +31,7 @@ cli(
|
||||
help: { description: packageJSON.description }
|
||||
},
|
||||
async ({ flags }) => {
|
||||
await runMigrations();
|
||||
await checkIsLatestVersion();
|
||||
|
||||
if (await isHookCalled()) {
|
||||
|
||||
@@ -107,13 +107,16 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
const remotes = await getGitRemotes();
|
||||
|
||||
// user isn't pushing, return early
|
||||
if (config.OCO_GITPUSH === false) return;
|
||||
|
||||
if (!remotes.length) {
|
||||
const { stdout } = await execa('git', ['push']);
|
||||
if (stdout) outro(stdout);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (remotes.length === 1 && config.OCO_GITPUSH !== true) {
|
||||
if (remotes.length === 1) {
|
||||
const isPushConfirmedByUser = await confirm({
|
||||
message: 'Do you want to run `git push`?'
|
||||
});
|
||||
@@ -156,13 +159,13 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
const { stdout } = await execa('git', ['push', selectedRemote]);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
|
||||
pushSpinner.stop(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
)} successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
}
|
||||
} else {
|
||||
const regenerateMessage = await confirm({
|
||||
|
||||
@@ -11,14 +11,9 @@ import { TEST_MOCK_TYPES } from '../engine/testAi';
|
||||
import { getI18nLocal, i18n } from '../i18n';
|
||||
|
||||
export enum CONFIG_KEYS {
|
||||
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
|
||||
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
|
||||
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
|
||||
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
|
||||
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
|
||||
OCO_API_KEY = 'OCO_API_KEY',
|
||||
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
|
||||
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
|
||||
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
|
||||
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
|
||||
OCO_EMOJI = 'OCO_EMOJI',
|
||||
OCO_MODEL = 'OCO_MODEL',
|
||||
@@ -27,14 +22,10 @@ export enum CONFIG_KEYS {
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
|
||||
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
|
||||
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
|
||||
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
|
||||
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
|
||||
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
|
||||
OCO_API_URL = 'OCO_API_URL',
|
||||
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
|
||||
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
|
||||
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
|
||||
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
|
||||
}
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
@@ -123,65 +114,19 @@ const validateConfig = (
|
||||
};
|
||||
|
||||
export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
|
||||
[CONFIG_KEYS.OCO_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'openai') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_OPENAI_API_KEY',
|
||||
'OCO_API_KEY',
|
||||
typeof value === 'string' && value.length > 0,
|
||||
'Empty value is not allowed'
|
||||
);
|
||||
|
||||
validateConfig(
|
||||
'OCO_OPENAI_API_KEY',
|
||||
'OCO_API_KEY',
|
||||
value,
|
||||
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'azure') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_AZURE_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'gemini') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_GEMINI_API_KEY',
|
||||
value || config.OCO_GEMINI_API_KEY || config.OCO_AI_PROVIDER === 'test',
|
||||
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'anthropic') return value;
|
||||
|
||||
validateConfig(
|
||||
'ANTHROPIC_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
|
||||
value || config.OCO_AI_PROVIDER !== 'flowise',
|
||||
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
|
||||
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -241,11 +186,11 @@ export const configValidators = {
|
||||
return getI18nLocal(value);
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH](value: any) {
|
||||
[CONFIG_KEYS.OCO_API_URL](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
|
||||
CONFIG_KEYS.OCO_API_URL,
|
||||
typeof value === 'string',
|
||||
'Must be string'
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
@@ -315,26 +260,6 @@ export const configValidators = {
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
|
||||
value.includes('openai.azure.com'),
|
||||
'Must be in format "https://<resource name>.openai.azure.com/"'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
|
||||
typeof value === 'string' && value.includes(':'),
|
||||
'Value must be string and should include both I.P. and port number' // Considering the possibility of DNS lookup or feeding the I.P. explicitly, there is no pattern to verify, except a column for the port number
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
|
||||
@@ -346,11 +271,11 @@ export const configValidators = {
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) {
|
||||
[CONFIG_KEYS.OCO_WHY](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OLLAMA_API_URL,
|
||||
typeof value === 'string' && value.startsWith('http'),
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
CONFIG_KEYS.OCO_WHY,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
return value;
|
||||
}
|
||||
@@ -367,14 +292,10 @@ export enum OCO_AI_PROVIDER_ENUM {
|
||||
}
|
||||
|
||||
export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
|
||||
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
|
||||
[CONFIG_KEYS.OCO_EMOJI]: boolean;
|
||||
[CONFIG_KEYS.OCO_WHY]: boolean;
|
||||
@@ -385,16 +306,11 @@ export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
|
||||
[CONFIG_KEYS.OCO_GITPUSH]: boolean;
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string;
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string;
|
||||
};
|
||||
|
||||
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
export const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
|
||||
const assertConfigsAreValid = (config: Record<string, any>) => {
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
@@ -446,7 +362,7 @@ const initGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
return DEFAULT_CONFIG;
|
||||
};
|
||||
|
||||
const parseEnvVarValue = (value?: any) => {
|
||||
const parseConfigVarValue = (value?: any) => {
|
||||
try {
|
||||
return JSON.parse(value);
|
||||
} catch (error) {
|
||||
@@ -459,41 +375,45 @@ const getEnvConfig = (envPath: string) => {
|
||||
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
OCO_API_URL: process.env.OCO_API_URL,
|
||||
OCO_API_KEY: process.env.OCO_API_KEY,
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
|
||||
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
|
||||
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
|
||||
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
|
||||
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue(
|
||||
process.env.OCO_TOKENS_MAX_OUTPUT
|
||||
),
|
||||
|
||||
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
|
||||
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
|
||||
|
||||
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
|
||||
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
|
||||
|
||||
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
|
||||
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
|
||||
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
|
||||
|
||||
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
|
||||
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
|
||||
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION),
|
||||
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
|
||||
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
|
||||
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
|
||||
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
|
||||
|
||||
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
|
||||
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH) // todo: deprecate
|
||||
};
|
||||
};
|
||||
|
||||
const getGlobalConfig = (configPath: string) => {
|
||||
export const setGlobalConfig = (
|
||||
config: ConfigType,
|
||||
configPath: string = defaultConfigPath
|
||||
) => {
|
||||
writeFileSync(configPath, iniStringify(config), 'utf8');
|
||||
};
|
||||
|
||||
export const getIsGlobalConfigFileExist = (
|
||||
configPath: string = defaultConfigPath
|
||||
) => {
|
||||
return existsSync(configPath);
|
||||
};
|
||||
|
||||
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
let globalConfig: ConfigType;
|
||||
|
||||
const isGlobalConfigFileExist = existsSync(configPath);
|
||||
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
|
||||
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
@@ -510,16 +430,18 @@ const getGlobalConfig = (configPath: string) => {
|
||||
* @param fallback - global ~/.opencommit config file
|
||||
* @returns merged config
|
||||
*/
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) =>
|
||||
Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => {
|
||||
const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]);
|
||||
return Array.from(allKeys).reduce((acc, key) => {
|
||||
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
|
||||
return acc;
|
||||
}, {} as ConfigType);
|
||||
};
|
||||
|
||||
interface GetConfigOptions {
|
||||
globalPath?: string;
|
||||
envPath?: string;
|
||||
setDefaultValues?: boolean;
|
||||
}
|
||||
|
||||
export const getConfig = ({
|
||||
@@ -535,13 +457,15 @@ export const getConfig = ({
|
||||
};
|
||||
|
||||
export const setConfig = (
|
||||
keyValues: [key: string, value: string][],
|
||||
keyValues: [key: string, value: string | boolean | number | null][],
|
||||
globalConfigPath: string = defaultConfigPath
|
||||
) => {
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigPath
|
||||
});
|
||||
|
||||
const configToSet = {};
|
||||
|
||||
for (let [key, value] of keyValues) {
|
||||
if (!configValidators.hasOwnProperty(key)) {
|
||||
const supportedKeys = Object.keys(configValidators).join('\n');
|
||||
@@ -553,7 +477,8 @@ export const setConfig = (
|
||||
let parsedConfigValue;
|
||||
|
||||
try {
|
||||
parsedConfigValue = JSON.parse(value);
|
||||
if (typeof value === 'string') parsedConfigValue = JSON.parse(value);
|
||||
else parsedConfigValue = value;
|
||||
} catch (error) {
|
||||
parsedConfigValue = value;
|
||||
}
|
||||
@@ -563,10 +488,10 @@ export const setConfig = (
|
||||
config
|
||||
);
|
||||
|
||||
config[key] = validValue;
|
||||
configToSet[key] = validValue;
|
||||
}
|
||||
|
||||
writeFileSync(globalConfigPath, iniStringify(config), 'utf8');
|
||||
setGlobalConfig(mergeConfigs(configToSet, config), globalConfigPath);
|
||||
|
||||
outro(`${chalk.green('✔')} config successfully set`);
|
||||
};
|
||||
|
||||
@@ -39,13 +39,9 @@ export const prepareCommitMessageHook = async (
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
if (
|
||||
!config.OCO_OPENAI_API_KEY &&
|
||||
!config.OCO_ANTHROPIC_API_KEY &&
|
||||
!config.OCO_AZURE_API_KEY
|
||||
) {
|
||||
if (!config.OCO_API_KEY) {
|
||||
outro(
|
||||
'No OCO_OPENAI_API_KEY or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key via `oco config set <key>=<value>, e.g. `oco config set OCO_OPENAI_API_KEY=<value>`. For more info see https://github.com/di-sukharev/opencommit'
|
||||
'No OCO_API_KEY is set. Set your key via `oco config set OCO_API_KEY=<value>. For more info see https://github.com/di-sukharev/opencommit'
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface FlowiseAiConfig extends AiEngineConfig {}
|
||||
|
||||
export class FlowiseAi implements AiEngine {
|
||||
export class FlowiseEngine implements AiEngine {
|
||||
config: FlowiseAiConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface GeminiConfig extends AiEngineConfig {}
|
||||
|
||||
export class Gemini implements AiEngine {
|
||||
export class GeminiEngine implements AiEngine {
|
||||
config: GeminiConfig;
|
||||
client: GoogleGenerativeAI;
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OllamaConfig extends AiEngineConfig {}
|
||||
|
||||
export class OllamaAi implements AiEngine {
|
||||
export class OllamaEngine implements AiEngine {
|
||||
config: OllamaConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
|
||||
@@ -6,11 +6,8 @@ import { mergeDiffs } from './utils/mergeDiffs';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
const config = getConfig();
|
||||
const MAX_TOKENS_INPUT =
|
||||
config.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
const MAX_TOKENS_OUTPUT =
|
||||
config.OCO_TOKENS_MAX_OUTPUT ||
|
||||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
const MAX_TOKENS_INPUT = config.OCO_TOKENS_MAX_INPUT;
|
||||
const MAX_TOKENS_OUTPUT = config.OCO_TOKENS_MAX_OUTPUT;
|
||||
|
||||
const generateCommitMessageChatCompletionPrompt = async (
|
||||
diff: string,
|
||||
|
||||
45
src/migrations/00_use_single_api_key_and_url.ts
Normal file
45
src/migrations/00_use_single_api_key_and_url.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
getConfig,
|
||||
OCO_AI_PROVIDER_ENUM,
|
||||
setConfig
|
||||
} from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const config = getConfig({ setDefaultValues: false });
|
||||
|
||||
const aiProvider = config.OCO_AI_PROVIDER;
|
||||
|
||||
let apiKey: string | undefined;
|
||||
let apiUrl: string | undefined;
|
||||
|
||||
if (aiProvider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
|
||||
apiKey = config['OCO_OLLAMA_API_KEY'];
|
||||
apiUrl = config['OCO_OLLAMA_API_URL'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.ANTHROPIC) {
|
||||
apiKey = config['OCO_ANTHROPIC_API_KEY'];
|
||||
apiUrl = config['OCO_ANTHROPIC_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.OPENAI) {
|
||||
apiKey = config['OCO_OPENAI_API_KEY'];
|
||||
apiUrl = config['OCO_OPENAI_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.AZURE) {
|
||||
apiKey = config['OCO_AZURE_API_KEY'];
|
||||
apiUrl = config['OCO_AZURE_ENDPOINT'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.GEMINI) {
|
||||
apiKey = config['OCO_GEMINI_API_KEY'];
|
||||
apiUrl = config['OCO_GEMINI_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.FLOWISE) {
|
||||
apiKey = config['OCO_FLOWISE_API_KEY'];
|
||||
apiUrl = config['OCO_FLOWISE_ENDPOINT'];
|
||||
} else {
|
||||
throw new Error(
|
||||
`Migration failed, set AI provider first. Run "oco config set OCO_AI_PROVIDER=<provider>", where <provider> is one of: ${Object.values(
|
||||
OCO_AI_PROVIDER_ENUM
|
||||
).join(', ')}`
|
||||
);
|
||||
}
|
||||
|
||||
if (apiKey) setConfig([[CONFIG_KEYS.OCO_API_KEY, apiKey]]);
|
||||
|
||||
if (apiUrl) setConfig([[CONFIG_KEYS.OCO_API_URL, apiUrl]]);
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
import { getGlobalConfig, setGlobalConfig } from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const obsoleteKeys = [
|
||||
'OCO_OLLAMA_API_KEY',
|
||||
'OCO_OLLAMA_API_URL',
|
||||
'OCO_ANTHROPIC_API_KEY',
|
||||
'OCO_ANTHROPIC_BASE_PATH',
|
||||
'OCO_OPENAI_API_KEY',
|
||||
'OCO_OPENAI_BASE_PATH',
|
||||
'OCO_AZURE_API_KEY',
|
||||
'OCO_AZURE_ENDPOINT',
|
||||
'OCO_GEMINI_API_KEY',
|
||||
'OCO_GEMINI_BASE_PATH',
|
||||
'OCO_FLOWISE_API_KEY',
|
||||
'OCO_FLOWISE_ENDPOINT'
|
||||
];
|
||||
|
||||
const globalConfig = getGlobalConfig();
|
||||
|
||||
const configToOverride = { ...globalConfig };
|
||||
|
||||
for (const key of obsoleteKeys) delete configToOverride[key];
|
||||
|
||||
setGlobalConfig(configToOverride);
|
||||
}
|
||||
20
src/migrations/02_set_missing_default_values.ts
Normal file
20
src/migrations/02_set_missing_default_values.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import {
|
||||
ConfigType,
|
||||
DEFAULT_CONFIG,
|
||||
getGlobalConfig,
|
||||
setConfig
|
||||
} from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const setDefaultConfigValues = (config: ConfigType) => {
|
||||
const entriesToSet: [key: string, value: string | boolean | number][] = [];
|
||||
for (const entry of Object.entries(DEFAULT_CONFIG)) {
|
||||
const [key, _value] = entry;
|
||||
if (config[key] === 'undefined') entriesToSet.push(entry);
|
||||
}
|
||||
|
||||
if (entriesToSet.length > 0) setConfig(entriesToSet);
|
||||
};
|
||||
|
||||
setDefaultConfigValues(getGlobalConfig());
|
||||
}
|
||||
18
src/migrations/_migrations.ts
Normal file
18
src/migrations/_migrations.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import migration00 from './00_use_single_api_key_and_url';
|
||||
import migration01 from './01_remove_obsolete_config_keys_from_global_file';
|
||||
import migration02 from './02_set_missing_default_values';
|
||||
|
||||
export const migrations = [
|
||||
{
|
||||
name: '00_use_single_api_key_and_url',
|
||||
run: migration00
|
||||
},
|
||||
{
|
||||
name: '01_remove_obsolete_config_keys_from_global_file',
|
||||
run: migration01
|
||||
},
|
||||
{
|
||||
name: '02_set_missing_default_values',
|
||||
run: migration02
|
||||
}
|
||||
];
|
||||
70
src/migrations/_run.ts
Normal file
70
src/migrations/_run.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import fs from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
import { migrations } from './_migrations';
|
||||
import { outro } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import {
|
||||
getConfig,
|
||||
getIsGlobalConfigFileExist,
|
||||
OCO_AI_PROVIDER_ENUM
|
||||
} from '../commands/config';
|
||||
|
||||
const migrationsFile = pathJoin(homedir(), '.opencommit_migrations');
|
||||
|
||||
const getCompletedMigrations = (): string[] => {
|
||||
if (!fs.existsSync(migrationsFile)) {
|
||||
return [];
|
||||
}
|
||||
const data = fs.readFileSync(migrationsFile, 'utf-8');
|
||||
return data ? JSON.parse(data) : [];
|
||||
};
|
||||
|
||||
const saveCompletedMigration = (migrationName: string) => {
|
||||
const completedMigrations = getCompletedMigrations();
|
||||
completedMigrations.push(migrationName);
|
||||
fs.writeFileSync(
|
||||
migrationsFile,
|
||||
JSON.stringify(completedMigrations, null, 2)
|
||||
);
|
||||
};
|
||||
|
||||
export const runMigrations = async () => {
|
||||
// if no config file, we assume it's a new installation and no migrations are needed
|
||||
if (!getIsGlobalConfigFileExist()) return;
|
||||
|
||||
const config = getConfig();
|
||||
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
|
||||
|
||||
const completedMigrations = getCompletedMigrations();
|
||||
|
||||
let isMigrated = false;
|
||||
|
||||
for (const migration of migrations) {
|
||||
if (!completedMigrations.includes(migration.name)) {
|
||||
try {
|
||||
console.log('Applying migration', migration.name);
|
||||
migration.run();
|
||||
console.log('Migration applied successfully', migration.name);
|
||||
saveCompletedMigration(migration.name);
|
||||
} catch (error) {
|
||||
outro(
|
||||
`${chalk.red('Failed to apply migration')} ${
|
||||
migration.name
|
||||
}: ${error}`
|
||||
);
|
||||
}
|
||||
|
||||
isMigrated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (isMigrated) {
|
||||
outro(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} Migrations to your config were applied successfully. Please rerun.`
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
};
|
||||
@@ -2,9 +2,9 @@ import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
import { AnthropicEngine } from '../engine/anthropic';
|
||||
import { AzureEngine } from '../engine/azure';
|
||||
import { AiEngine } from '../engine/Engine';
|
||||
import { FlowiseAi } from '../engine/flowise';
|
||||
import { Gemini } from '../engine/gemini';
|
||||
import { OllamaAi } from '../engine/ollama';
|
||||
import { FlowiseEngine } from '../engine/flowise';
|
||||
import { GeminiEngine } from '../engine/gemini';
|
||||
import { OllamaEngine } from '../engine/ollama';
|
||||
import { OpenAiEngine } from '../engine/openAi';
|
||||
import { TestAi, TestMockType } from '../engine/testAi';
|
||||
|
||||
@@ -16,50 +16,30 @@ export function getEngine(): AiEngine {
|
||||
model: config.OCO_MODEL!,
|
||||
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
|
||||
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
|
||||
baseURL: config.OCO_OPENAI_BASE_PATH!
|
||||
baseURL: config.OCO_API_URL!,
|
||||
apiKey: config.OCO_API_KEY!
|
||||
};
|
||||
|
||||
switch (provider) {
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
return new OllamaAi({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: '',
|
||||
baseURL: config.OCO_OLLAMA_API_URL!
|
||||
});
|
||||
return new OllamaEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return new AnthropicEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_ANTHROPIC_API_KEY!
|
||||
});
|
||||
return new AnthropicEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.TEST:
|
||||
return new TestAi(config.OCO_TEST_MOCK_TYPE as TestMockType);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
return new Gemini({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_GEMINI_API_KEY!,
|
||||
baseURL: config.OCO_GEMINI_BASE_PATH!
|
||||
});
|
||||
return new GeminiEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AZURE:
|
||||
return new AzureEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_AZURE_API_KEY!
|
||||
});
|
||||
return new AzureEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.FLOWISE:
|
||||
return new FlowiseAi({
|
||||
...DEFAULT_CONFIG,
|
||||
baseURL: config.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
|
||||
apiKey: config.OCO_FLOWISE_API_KEY!
|
||||
});
|
||||
return new FlowiseEngine(DEFAULT_CONFIG);
|
||||
|
||||
default:
|
||||
return new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_OPENAI_API_KEY!
|
||||
});
|
||||
return new OpenAiEngine(DEFAULT_CONFIG);
|
||||
}
|
||||
}
|
||||
|
||||
205
test/e2e/gitPush.test.ts
Normal file
205
test/e2e/gitPush.test.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
import path from 'path';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { exec } from 'child_process';
|
||||
import { prepareTempDir } from './utils';
|
||||
import { promisify } from 'util';
|
||||
import { render } from 'cli-testing-library';
|
||||
import { resolve } from 'path';
|
||||
import { rm } from 'fs';
|
||||
const fsExec = promisify(exec);
|
||||
const fsRemove = promisify(rm);
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* [no remotes]
|
||||
*/
|
||||
const prepareNoRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
*/
|
||||
const prepareOneRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
* other ../remote2.git (fetch)
|
||||
* other ../remote2.git (push)
|
||||
*/
|
||||
const prepareTwoRemotesGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git init --bare other.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
await fsExec('git remote add other ../other.git', { cwd: gitDir });
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
describe('cli flow to push git branch', () => {
|
||||
it('do nothing when OCO_GITPUSH is set to false', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='false' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Command failed with exit code 1')
|
||||
).not.toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push and cause error when there is no remote', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
|
||||
expect(
|
||||
await findByText('Command failed with exit code 1')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push when one remote is set', async () => {
|
||||
const { gitDir, cleanup } = await prepareOneRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push when two remotes are set', async () => {
|
||||
const { gitDir, cleanup } = await prepareTwoRemotesGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
});
|
||||
@@ -17,7 +17,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
@@ -46,7 +46,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async
|
||||
|
||||
expect(await findByText('Successfully committed')).toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
@@ -209,7 +209,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await oco.findByText('Choose a remote to push to')
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ export const prepareEnvironment = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
const tempDir = await prepareTempDir();
|
||||
// Create a remote git repository int the temp directory. This is necessary to execute the `git push` command
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
@@ -30,4 +30,8 @@ export const prepareEnvironment = async (): Promise<{
|
||||
}
|
||||
}
|
||||
|
||||
export const prepareTempDir = async(): Promise<string> => {
|
||||
return await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
}
|
||||
|
||||
export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { existsSync, readFileSync, rmSync } from 'fs';
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
DEFAULT_CONFIG,
|
||||
getConfig,
|
||||
setConfig
|
||||
@@ -50,14 +51,13 @@ describe('config', () => {
|
||||
describe('getConfig', () => {
|
||||
it('should prioritize local .env over global .opencommit config', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-3.5-turbo',
|
||||
OCO_LANGUAGE: 'en'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_OPENAI_API_KEY: 'local-key',
|
||||
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
|
||||
OCO_API_KEY: 'local-key',
|
||||
OCO_LANGUAGE: 'fr'
|
||||
});
|
||||
|
||||
@@ -67,22 +67,21 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
|
||||
expect(config.OCO_API_KEY).toEqual('local-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
|
||||
expect(config.OCO_LANGUAGE).toEqual('fr');
|
||||
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
|
||||
});
|
||||
|
||||
it('should fallback to global config when local config is not set', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'de',
|
||||
OCO_DESCRIPTION: 'true'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
|
||||
OCO_API_URL: 'local-api-url'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
@@ -91,8 +90,8 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
|
||||
expect(config.OCO_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_API_URL).toEqual('local-api-url');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
expect(config.OCO_LANGUAGE).toEqual('de');
|
||||
expect(config.OCO_DESCRIPTION).toEqual(true);
|
||||
@@ -124,7 +123,7 @@ describe('config', () => {
|
||||
|
||||
it('should handle empty local config correctly', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'es'
|
||||
});
|
||||
@@ -137,20 +136,20 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
expect(config.OCO_LANGUAGE).toEqual('es');
|
||||
});
|
||||
|
||||
it('should override global config with null values in local .env', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'es'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_OPENAI_API_KEY: 'null'
|
||||
OCO_API_KEY: 'null'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
@@ -159,7 +158,7 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual(null);
|
||||
});
|
||||
|
||||
it('should handle empty global config', async () => {
|
||||
@@ -172,7 +171,7 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual(undefined);
|
||||
expect(config.OCO_API_KEY).toEqual(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -188,12 +187,12 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key_1']],
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key_1']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key_1');
|
||||
expect(fileContent).toContain('OCO_API_KEY=persisted-key_1');
|
||||
Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
|
||||
expect(fileContent).toContain(`${key}=${value}`);
|
||||
});
|
||||
@@ -203,42 +202,48 @@ describe('config', () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
await setConfig(
|
||||
[
|
||||
['OCO_OPENAI_API_KEY', 'new-key'],
|
||||
['OCO_MODEL', 'gpt-4']
|
||||
[CONFIG_KEYS.OCO_API_KEY, 'new-key'],
|
||||
[CONFIG_KEYS.OCO_MODEL, 'gpt-4']
|
||||
],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('new-key');
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
expect(config.OCO_API_KEY).toEqual('new-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
});
|
||||
|
||||
it('should update existing config values', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_OPENAI_API_KEY: 'initial-key'
|
||||
OCO_API_KEY: 'initial-key'
|
||||
});
|
||||
await setConfig(
|
||||
[['OCO_OPENAI_API_KEY', 'updated-key']],
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'updated-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('updated-key');
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
expect(config.OCO_API_KEY).toEqual('updated-key');
|
||||
});
|
||||
|
||||
it('should handle boolean and numeric values correctly', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
await setConfig(
|
||||
[
|
||||
['OCO_TOKENS_MAX_INPUT', '8192'],
|
||||
['OCO_DESCRIPTION', 'true'],
|
||||
['OCO_ONE_LINE_COMMIT', 'false']
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, '8192'],
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION, 'true'],
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT, 'false']
|
||||
],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
|
||||
expect(config.OCO_DESCRIPTION).toEqual(true);
|
||||
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
|
||||
@@ -266,12 +271,12 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key']],
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key');
|
||||
expect(fileContent).toContain('OCO_API_KEY=persisted-key');
|
||||
});
|
||||
|
||||
it('should set multiple configs in a row and keep the changes', async () => {
|
||||
@@ -279,14 +284,17 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key']],
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent1).toContain('OCO_OPENAI_API_KEY=persisted-key');
|
||||
expect(fileContent1).toContain('OCO_API_KEY=persisted-key');
|
||||
|
||||
await setConfig([['OCO_MODEL', 'gpt-4']], globalConfigFile.filePath);
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_MODEL, 'gpt-4']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent2).toContain('OCO_MODEL=gpt-4');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Gemini } from '../../src/engine/gemini';
|
||||
import { GeminiEngine } from '../../src/engine/gemini';
|
||||
|
||||
import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
|
||||
import {
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
describe('Gemini', () => {
|
||||
let gemini: Gemini;
|
||||
let gemini: GeminiEngine;
|
||||
let mockConfig: ConfigType;
|
||||
let mockGoogleGenerativeAi: GoogleGenerativeAI;
|
||||
let mockGenerativeModel: GenerativeModel;
|
||||
@@ -20,8 +20,8 @@ describe('Gemini', () => {
|
||||
const mockGemini = () => {
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
gemini = new Gemini({
|
||||
apiKey: mockConfig.OCO_GEMINI_API_KEY,
|
||||
gemini = new GeminiEngine({
|
||||
apiKey: mockConfig.OCO_API_KEY,
|
||||
model: mockConfig.OCO_MODEL
|
||||
});
|
||||
};
|
||||
@@ -45,12 +45,10 @@ describe('Gemini', () => {
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
|
||||
mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key';
|
||||
mockConfig.OCO_API_KEY = 'mock-api-key';
|
||||
mockConfig.OCO_MODEL = 'gemini-1.5-flash';
|
||||
|
||||
mockGoogleGenerativeAi = new GoogleGenerativeAI(
|
||||
mockConfig.OCO_GEMINI_API_KEY
|
||||
);
|
||||
mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_API_KEY);
|
||||
mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
|
||||
model: mockConfig.OCO_MODEL
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user