Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-01-13 15:47:58 -05:00)

Compare commits: 2 commits, `refactorin`...`398_make_w`

| Author | SHA1 | Date |
|---|---|---|
| | 23a8d76977 | |
| | 4387c12552 | |

README.md (62 changed lines)
@@ -28,19 +28,30 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```

2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys) or other supported LLM providers (we support them all). Make sure that you add your OpenAI payment details to your account, so the API works.
Alternatively run it via `npx opencommit` or `bunx opencommit`

MacOS may ask to run the command with `sudo` when installing a package globally.

2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.

3. Set the key to OpenCommit config:

```sh
oco config set OCO_API_KEY=<your_api_key>
oco config set OCO_OPENAI_API_KEY=<your_api_key>
```

Your API key is stored locally in the `~/.opencommit` config file.

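Under the hood this is a plain `KEY=value` file. A minimal sketch of what it may contain after the step above (the key name depends on the version you run, and the values below are placeholders):

```sh
# Inspect the locally stored OpenCommit config
cat ~/.opencommit
# expected shape (placeholder values):
# OCO_API_KEY=<your_api_key>
# OCO_MODEL=gpt-3.5-turbo
```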
## Usage

You can call OpenCommit with `oco` command to generate a commit message for your staged changes:
You can call OpenCommit directly to generate a commit message for your staged changes:

```sh
git add <files...>
opencommit
```

You can also use the `oco` shortcut:

```sh
git add <files...>
@@ -59,17 +70,21 @@ You can also run it with local model through ollama:

```sh
git add <files...>
oco config set OCO_AI_PROVIDER='ollama' OCO_MODEL='llama3:8b'
oco config set OCO_AI_PROVIDER='ollama'
```

Default model is `mistral`.
If you want to use a model other than mistral (default), you can do so by setting the `OCO_AI_PROVIDER` environment variable as follows:

```sh
oco config set OCO_AI_PROVIDER='ollama/llama3:8b'
```

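Whichever model you pick has to be available to Ollama itself. A minimal sketch, assuming the `ollama` CLI is installed locally:

```sh
# Pull the model referenced above and confirm it shows up before generating commits
ollama pull llama3:8b
ollama list
```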
If you have ollama that is set up in docker/ on another machine with GPUs (not locally), you can change the default endpoint url.

You can do so by setting the `OCO_API_URL` environment variable as follows:
You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:

```sh
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
oco config set OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat'
```

where 192.168.1.10 is example of endpoint URL, where you have ollama set up.
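Before pointing OpenCommit at a remote endpoint, it can help to confirm the Ollama API actually answers there. A sketch, assuming the host and port from the example above and that the `llama3:8b` model is pulled on that machine:

```sh
# Hypothetical smoke test against the remote Ollama chat endpoint
curl http://192.168.1.10:11434/api/chat \
  -d '{"model": "llama3:8b", "messages": [{"role": "user", "content": "hi"}], "stream": false}'
```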
@@ -106,21 +121,22 @@ Create a `.env` file and add OpenCommit config variables there like this:

```env
...
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any string basically, but it should be a valid model name>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama or ollama/model>
...
```

Global configs are same as local configs, but they are stored in the global `~/.opencommit` config file and set with `oco config set` command, e.g. `oco config set OCO_MODEL=gpt-4o`.
This are not all the config options, but you get the point.

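As an illustration, a repo-local `.env` only needs the values you want to differ; everything else falls back to the global file. A sketch using keys from the list above:

```sh
# Hypothetical per-repo override; local .env values take precedence over ~/.opencommit
cat > .env <<'EOF'
OCO_MODEL=gpt-4o
OCO_EMOJI=true
EOF
```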
### Global config for all repos

@@ -172,26 +188,26 @@ or for as a cheaper option:
oco config set OCO_MODEL=gpt-3.5-turbo
```

### Switch to other LLM providers with a custom URL
### Switch to Azure OpenAI

By default OpenCommit uses [OpenAI](https://openai.com).

You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/) or Flowise or Ollama.
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀

```sh
oco config set OCO_AI_PROVIDER=azure OCO_API_KEY=<your_azure_api_key> OCO_API_URL=<your_azure_endpoint>

oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_API_URL=<your_flowise_endpoint>

oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
opencommit config set OCO_AI_PROVIDER=azure
```

Of course need to set 'OCO_OPENAI_API_KEY'. And also need to set the
'OPENAI_BASE_PATH' for the endpoint and set the deployment name to
'model'.

### Locale configuration

To globally specify the language used to generate commit messages:

```sh
# de, German, Deutsch
# de, German ,Deutsch
oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch
@@ -207,14 +223,12 @@ All available languages are currently listed in the [i18n](https://github.com/di

### Push to git (gonna be deprecated)

A prompt for pushing to git is on by default but if you would like to turn it off just use:
A prompt to ushing to git is on by default but if you would like to turn it off just use:

```sh
oco config set OCO_GITPUSH=false
```

and it will exit right after commit is confirmed without asking if you would like to push to remote.

### Switch to `@commitlint`

OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages in respect with the local config.
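A short sketch of the switch, using the `OCO_PROMPT_MODULE` key from the config reference above:

```sh
# Switch from the default conventional-commit generator to the @commitlint prompt module
oco config set OCO_PROMPT_MODULE='@commitlint'
```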
@@ -389,7 +403,7 @@ jobs:
# set openAI api key in repo actions secrets,
# for openAI keys go to: https://platform.openai.com/account/api-keys
# for repo secret go to: <your_repo_url>/settings/secrets/actions
OCO_API_KEY: ${{ secrets.OCO_API_KEY }}
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}

# customization
OCO_TOKENS_MAX_INPUT: 4096

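The repository secret referenced in the comments above can also be added from a terminal; a sketch, assuming the GitHub CLI (`gh`) is installed and authenticated:

```sh
# Hypothetical: store the API key as a repository Actions secret for the workflow above
gh secret set OCO_API_KEY --body "<your_api_key>"
```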
out/cli.cjs (2600 changed lines): file diff suppressed because it is too large.
package-lock.json (generated, 63 changed lines)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "opencommit",
|
||||
"version": "3.1.2",
|
||||
"version": "3.0.20",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "opencommit",
|
||||
"version": "3.1.2",
|
||||
"version": "3.0.20",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
@@ -28,7 +28,7 @@
|
||||
"ignore": "^5.2.4",
|
||||
"ini": "^3.0.1",
|
||||
"inquirer": "^9.1.4",
|
||||
"openai": "^4.57.0"
|
||||
"openai": "^4.56.0"
|
||||
},
|
||||
"bin": {
|
||||
"oco": "out/cli.cjs",
|
||||
@@ -2098,11 +2098,6 @@
|
||||
"form-data": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/qs": {
|
||||
"version": "6.9.15",
|
||||
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz",
|
||||
"integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg=="
|
||||
},
|
||||
"node_modules/@types/semver": {
|
||||
"version": "7.5.8",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
|
||||
@@ -7224,17 +7219,6 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/object-inspect": {
|
||||
"version": "1.13.2",
|
||||
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
|
||||
"integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/once": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
@@ -7258,19 +7242,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/openai": {
|
||||
"version": "4.57.0",
|
||||
"resolved": "https://registry.npmjs.org/openai/-/openai-4.57.0.tgz",
|
||||
"integrity": "sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==",
|
||||
"version": "4.56.0",
|
||||
"resolved": "https://registry.npmjs.org/openai/-/openai-4.56.0.tgz",
|
||||
"integrity": "sha512-zcag97+3bG890MNNa0DQD9dGmmTWL8unJdNkulZzWRXrl+QeD+YkBI4H58rJcwErxqGK6a0jVPZ4ReJjhDGcmw==",
|
||||
"dependencies": {
|
||||
"@types/node": "^18.11.18",
|
||||
"@types/node-fetch": "^2.6.4",
|
||||
"@types/qs": "^6.9.7",
|
||||
"abort-controller": "^3.0.0",
|
||||
"agentkeepalive": "^4.2.1",
|
||||
"form-data-encoder": "1.7.2",
|
||||
"formdata-node": "^4.3.2",
|
||||
"node-fetch": "^2.6.7",
|
||||
"qs": "^6.10.3"
|
||||
"node-fetch": "^2.6.7"
|
||||
},
|
||||
"bin": {
|
||||
"openai": "bin/cli"
|
||||
@@ -7722,20 +7704,6 @@
|
||||
}
|
||||
]
|
||||
},
|
||||
"node_modules/qs": {
|
||||
"version": "6.13.0",
|
||||
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
|
||||
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
|
||||
"dependencies": {
|
||||
"side-channel": "^1.0.6"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
@@ -8082,23 +8050,6 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/side-channel": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
|
||||
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
|
||||
"dependencies": {
|
||||
"call-bind": "^1.0.7",
|
||||
"es-errors": "^1.3.0",
|
||||
"get-intrinsic": "^1.2.4",
|
||||
"object-inspect": "^1.13.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/signal-exit": {
|
||||
"version": "3.0.7",
|
||||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.1.2",
"version": "3.1.0",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -88,6 +88,7 @@
"@google/generative-ai": "^0.11.4",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",
@@ -96,6 +97,6 @@
"ignore": "^5.2.4",
"ini": "^3.0.1",
"inquirer": "^9.1.4",
"openai": "^4.57.0"
"openai": "^4.56.0"
}
}

@@ -9,7 +9,6 @@ import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { runMigrations } from './migrations/_run.js';

const extraArgs = process.argv.slice(2);

@@ -31,7 +30,6 @@ cli(
help: { description: packageJSON.description }
},
async ({ flags }) => {
await runMigrations();
await checkIsLatestVersion();

if (await isHookCalled()) {

@@ -107,16 +107,13 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
const remotes = await getGitRemotes();
|
||||
|
||||
// user isn't pushing, return early
|
||||
if (config.OCO_GITPUSH === false) return;
|
||||
|
||||
if (!remotes.length) {
|
||||
const { stdout } = await execa('git', ['push']);
|
||||
if (stdout) outro(stdout);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (remotes.length === 1) {
|
||||
if (remotes.length === 1 && config.OCO_GITPUSH !== true) {
|
||||
const isPushConfirmedByUser = await confirm({
|
||||
message: 'Do you want to run `git push`?'
|
||||
});
|
||||
@@ -159,13 +156,13 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
const { stdout } = await execa('git', ['push', selectedRemote]);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
|
||||
pushSpinner.stop(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} successfully pushed all commits to ${selectedRemote}`
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
}
|
||||
} else {
|
||||
const regenerateMessage = await confirm({
|
||||
|
||||
@@ -11,9 +11,14 @@ import { TEST_MOCK_TYPES } from '../engine/testAi';
|
||||
import { getI18nLocal, i18n } from '../i18n';
|
||||
|
||||
export enum CONFIG_KEYS {
|
||||
OCO_API_KEY = 'OCO_API_KEY',
|
||||
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
|
||||
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
|
||||
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
|
||||
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
|
||||
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
|
||||
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
|
||||
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
|
||||
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
|
||||
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
|
||||
OCO_EMOJI = 'OCO_EMOJI',
|
||||
OCO_MODEL = 'OCO_MODEL',
|
||||
@@ -22,10 +27,14 @@ export enum CONFIG_KEYS {
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
|
||||
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
|
||||
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
|
||||
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
|
||||
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
|
||||
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
|
||||
OCO_API_URL = 'OCO_API_URL',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
|
||||
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
|
||||
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
|
||||
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
|
||||
}
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
@@ -114,19 +123,65 @@ const validateConfig = (
|
||||
};
|
||||
|
||||
export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_API_KEY](value: any, config: any = {}) {
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'openai') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_API_KEY',
|
||||
'OCO_OPENAI_API_KEY',
|
||||
typeof value === 'string' && value.length > 0,
|
||||
'Empty value is not allowed'
|
||||
);
|
||||
|
||||
validateConfig(
|
||||
'OCO_API_KEY',
|
||||
'OCO_OPENAI_API_KEY',
|
||||
value,
|
||||
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
|
||||
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'azure') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_AZURE_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'gemini') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_GEMINI_API_KEY',
|
||||
value || config.OCO_GEMINI_API_KEY || config.OCO_AI_PROVIDER === 'test',
|
||||
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'anthropic') return value;
|
||||
|
||||
validateConfig(
|
||||
'ANTHROPIC_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
|
||||
value || config.OCO_AI_PROVIDER !== 'flowise',
|
||||
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -186,11 +241,11 @@ export const configValidators = {
|
||||
return getI18nLocal(value);
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_API_URL](value: any) {
|
||||
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_API_URL,
|
||||
CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
|
||||
typeof value === 'string',
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
'Must be string'
|
||||
);
|
||||
return value;
|
||||
},
|
||||
@@ -260,6 +315,26 @@ export const configValidators = {
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
|
||||
value.includes('openai.azure.com'),
|
||||
'Must be in format "https://<resource name>.openai.azure.com/"'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
|
||||
typeof value === 'string' && value.includes(':'),
|
||||
'Value must be string and should include both I.P. and port number' // Considering the possibility of DNS lookup or feeding the I.P. explicitly, there is no pattern to verify, except a column for the port number
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
|
||||
@@ -271,11 +346,11 @@ export const configValidators = {
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_WHY](value: any) {
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_WHY,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
CONFIG_KEYS.OCO_OLLAMA_API_URL,
|
||||
typeof value === 'string' && value.startsWith('http'),
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
);
|
||||
return value;
|
||||
}
|
||||
@@ -292,10 +367,14 @@ export enum OCO_AI_PROVIDER_ENUM {
|
||||
}
|
||||
|
||||
export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
|
||||
[CONFIG_KEYS.OCO_EMOJI]: boolean;
|
||||
[CONFIG_KEYS.OCO_WHY]: boolean;
|
||||
@@ -306,11 +385,16 @@ export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
|
||||
[CONFIG_KEYS.OCO_GITPUSH]: boolean;
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string;
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string;
|
||||
};
|
||||
|
||||
export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
export const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
|
||||
const assertConfigsAreValid = (config: Record<string, any>) => {
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
@@ -362,7 +446,7 @@ const initGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
return DEFAULT_CONFIG;
|
||||
};
|
||||
|
||||
const parseConfigVarValue = (value?: any) => {
|
||||
const parseEnvVarValue = (value?: any) => {
|
||||
try {
|
||||
return JSON.parse(value);
|
||||
} catch (error) {
|
||||
@@ -375,45 +459,41 @@ const getEnvConfig = (envPath: string) => {
|
||||
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
OCO_API_URL: process.env.OCO_API_URL,
|
||||
OCO_API_KEY: process.env.OCO_API_KEY,
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
|
||||
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
|
||||
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue(
|
||||
process.env.OCO_TOKENS_MAX_OUTPUT
|
||||
),
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
|
||||
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
|
||||
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
|
||||
|
||||
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION),
|
||||
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
|
||||
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
|
||||
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
|
||||
|
||||
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
|
||||
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
|
||||
|
||||
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
|
||||
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
|
||||
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
|
||||
|
||||
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
|
||||
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
|
||||
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
|
||||
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
|
||||
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
|
||||
|
||||
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH) // todo: deprecate
|
||||
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
|
||||
};
|
||||
};
|
||||
|
||||
export const setGlobalConfig = (
|
||||
config: ConfigType,
|
||||
configPath: string = defaultConfigPath
|
||||
) => {
|
||||
writeFileSync(configPath, iniStringify(config), 'utf8');
|
||||
};
|
||||
|
||||
export const getIsGlobalConfigFileExist = (
|
||||
configPath: string = defaultConfigPath
|
||||
) => {
|
||||
return existsSync(configPath);
|
||||
};
|
||||
|
||||
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
const getGlobalConfig = (configPath: string) => {
|
||||
let globalConfig: ConfigType;
|
||||
|
||||
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
|
||||
const isGlobalConfigFileExist = existsSync(configPath);
|
||||
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
@@ -430,18 +510,16 @@ export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
* @param fallback - global ~/.opencommit config file
|
||||
* @returns merged config
|
||||
*/
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => {
|
||||
const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]);
|
||||
return Array.from(allKeys).reduce((acc, key) => {
|
||||
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) =>
|
||||
Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
|
||||
return acc;
|
||||
}, {} as ConfigType);
|
||||
};
|
||||
|
||||
interface GetConfigOptions {
|
||||
globalPath?: string;
|
||||
envPath?: string;
|
||||
setDefaultValues?: boolean;
|
||||
}
|
||||
|
||||
export const getConfig = ({
|
||||
@@ -457,15 +535,13 @@ export const getConfig = ({
|
||||
};
|
||||
|
||||
export const setConfig = (
|
||||
keyValues: [key: string, value: string | boolean | number | null][],
|
||||
keyValues: [key: string, value: string][],
|
||||
globalConfigPath: string = defaultConfigPath
|
||||
) => {
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigPath
|
||||
});
|
||||
|
||||
const configToSet = {};
|
||||
|
||||
for (let [key, value] of keyValues) {
|
||||
if (!configValidators.hasOwnProperty(key)) {
|
||||
const supportedKeys = Object.keys(configValidators).join('\n');
|
||||
@@ -477,8 +553,7 @@ export const setConfig = (
|
||||
let parsedConfigValue;
|
||||
|
||||
try {
|
||||
if (typeof value === 'string') parsedConfigValue = JSON.parse(value);
|
||||
else parsedConfigValue = value;
|
||||
parsedConfigValue = JSON.parse(value);
|
||||
} catch (error) {
|
||||
parsedConfigValue = value;
|
||||
}
|
||||
@@ -488,10 +563,10 @@ export const setConfig = (
|
||||
config
|
||||
);
|
||||
|
||||
configToSet[key] = validValue;
|
||||
config[key] = validValue;
|
||||
}
|
||||
|
||||
setGlobalConfig(mergeConfigs(configToSet, config), globalConfigPath);
|
||||
writeFileSync(globalConfigPath, iniStringify(config), 'utf8');
|
||||
|
||||
outro(`${chalk.green('✔')} config successfully set`);
|
||||
};
|
||||
|
||||
@@ -39,11 +39,14 @@ export const prepareCommitMessageHook = async (
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
if (!config.OCO_API_KEY) {
|
||||
outro(
|
||||
'No OCO_API_KEY is set. Set your key via `oco config set OCO_API_KEY=<value>. For more info see https://github.com/di-sukharev/opencommit'
|
||||
if (
|
||||
!config.OCO_OPENAI_API_KEY &&
|
||||
!config.OCO_ANTHROPIC_API_KEY &&
|
||||
!config.OCO_AZURE_API_KEY
|
||||
) {
|
||||
throw new Error(
|
||||
'No OPEN_AI_API or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key in ~/.opencommit'
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const spin = spinner();
|
||||
|
||||
@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface FlowiseAiConfig extends AiEngineConfig {}
|
||||
|
||||
export class FlowiseEngine implements AiEngine {
|
||||
export class FlowiseAi implements AiEngine {
|
||||
config: FlowiseAiConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface GeminiConfig extends AiEngineConfig {}
|
||||
|
||||
export class GeminiEngine implements AiEngine {
|
||||
export class Gemini implements AiEngine {
|
||||
config: GeminiConfig;
|
||||
client: GoogleGenerativeAI;
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OllamaConfig extends AiEngineConfig {}
|
||||
|
||||
export class OllamaEngine implements AiEngine {
|
||||
export class OllamaAi implements AiEngine {
|
||||
config: OllamaConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
@@ -28,10 +28,7 @@ export class OllamaEngine implements AiEngine {
|
||||
stream: false
|
||||
};
|
||||
try {
|
||||
const response = await this.client.post(
|
||||
this.client.getUri(this.config),
|
||||
params
|
||||
);
|
||||
const response = await this.client.post('', params);
|
||||
|
||||
const message = response.data.message;
|
||||
|
||||
|
||||
@@ -6,8 +6,11 @@ import { mergeDiffs } from './utils/mergeDiffs';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
const config = getConfig();
|
||||
const MAX_TOKENS_INPUT = config.OCO_TOKENS_MAX_INPUT;
|
||||
const MAX_TOKENS_OUTPUT = config.OCO_TOKENS_MAX_OUTPUT;
|
||||
const MAX_TOKENS_INPUT =
|
||||
config.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
const MAX_TOKENS_OUTPUT =
|
||||
config.OCO_TOKENS_MAX_OUTPUT ||
|
||||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
|
||||
const generateCommitMessageChatCompletionPrompt = async (
|
||||
diff: string,
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
getConfig,
|
||||
OCO_AI_PROVIDER_ENUM,
|
||||
setConfig
|
||||
} from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const config = getConfig({ setDefaultValues: false });
|
||||
|
||||
const aiProvider = config.OCO_AI_PROVIDER;
|
||||
|
||||
let apiKey: string | undefined;
|
||||
let apiUrl: string | undefined;
|
||||
|
||||
if (aiProvider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
|
||||
apiKey = config['OCO_OLLAMA_API_KEY'];
|
||||
apiUrl = config['OCO_OLLAMA_API_URL'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.ANTHROPIC) {
|
||||
apiKey = config['OCO_ANTHROPIC_API_KEY'];
|
||||
apiUrl = config['OCO_ANTHROPIC_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.OPENAI) {
|
||||
apiKey = config['OCO_OPENAI_API_KEY'];
|
||||
apiUrl = config['OCO_OPENAI_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.AZURE) {
|
||||
apiKey = config['OCO_AZURE_API_KEY'];
|
||||
apiUrl = config['OCO_AZURE_ENDPOINT'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.GEMINI) {
|
||||
apiKey = config['OCO_GEMINI_API_KEY'];
|
||||
apiUrl = config['OCO_GEMINI_BASE_PATH'];
|
||||
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.FLOWISE) {
|
||||
apiKey = config['OCO_FLOWISE_API_KEY'];
|
||||
apiUrl = config['OCO_FLOWISE_ENDPOINT'];
|
||||
} else {
|
||||
throw new Error(
|
||||
`Migration failed, set AI provider first. Run "oco config set OCO_AI_PROVIDER=<provider>", where <provider> is one of: ${Object.values(
|
||||
OCO_AI_PROVIDER_ENUM
|
||||
).join(', ')}`
|
||||
);
|
||||
}
|
||||
|
||||
if (apiKey) setConfig([[CONFIG_KEYS.OCO_API_KEY, apiKey]]);
|
||||
|
||||
if (apiUrl) setConfig([[CONFIG_KEYS.OCO_API_URL, apiUrl]]);
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
import { getGlobalConfig, setGlobalConfig } from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const obsoleteKeys = [
|
||||
'OCO_OLLAMA_API_KEY',
|
||||
'OCO_OLLAMA_API_URL',
|
||||
'OCO_ANTHROPIC_API_KEY',
|
||||
'OCO_ANTHROPIC_BASE_PATH',
|
||||
'OCO_OPENAI_API_KEY',
|
||||
'OCO_OPENAI_BASE_PATH',
|
||||
'OCO_AZURE_API_KEY',
|
||||
'OCO_AZURE_ENDPOINT',
|
||||
'OCO_GEMINI_API_KEY',
|
||||
'OCO_GEMINI_BASE_PATH',
|
||||
'OCO_FLOWISE_API_KEY',
|
||||
'OCO_FLOWISE_ENDPOINT'
|
||||
];
|
||||
|
||||
const globalConfig = getGlobalConfig();
|
||||
|
||||
const configToOverride = { ...globalConfig };
|
||||
|
||||
for (const key of obsoleteKeys) delete configToOverride[key];
|
||||
|
||||
setGlobalConfig(configToOverride);
|
||||
}
|
||||
@@ -1,20 +0,0 @@
|
||||
import {
|
||||
ConfigType,
|
||||
DEFAULT_CONFIG,
|
||||
getGlobalConfig,
|
||||
setConfig
|
||||
} from '../commands/config';
|
||||
|
||||
export default function () {
|
||||
const setDefaultConfigValues = (config: ConfigType) => {
|
||||
const entriesToSet: [key: string, value: string | boolean | number][] = [];
|
||||
for (const entry of Object.entries(DEFAULT_CONFIG)) {
|
||||
const [key, _value] = entry;
|
||||
if (config[key] === 'undefined') entriesToSet.push(entry);
|
||||
}
|
||||
|
||||
if (entriesToSet.length > 0) setConfig(entriesToSet);
|
||||
};
|
||||
|
||||
setDefaultConfigValues(getGlobalConfig());
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
import migration00 from './00_use_single_api_key_and_url';
|
||||
import migration01 from './01_remove_obsolete_config_keys_from_global_file';
|
||||
import migration02 from './02_set_missing_default_values';
|
||||
|
||||
export const migrations = [
|
||||
{
|
||||
name: '00_use_single_api_key_and_url',
|
||||
run: migration00
|
||||
},
|
||||
{
|
||||
name: '01_remove_obsolete_config_keys_from_global_file',
|
||||
run: migration01
|
||||
},
|
||||
{
|
||||
name: '02_set_missing_default_values',
|
||||
run: migration02
|
||||
}
|
||||
];
|
||||
@@ -1,70 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
import { migrations } from './_migrations';
|
||||
import { outro } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import {
|
||||
getConfig,
|
||||
getIsGlobalConfigFileExist,
|
||||
OCO_AI_PROVIDER_ENUM
|
||||
} from '../commands/config';
|
||||
|
||||
const migrationsFile = pathJoin(homedir(), '.opencommit_migrations');
|
||||
|
||||
const getCompletedMigrations = (): string[] => {
|
||||
if (!fs.existsSync(migrationsFile)) {
|
||||
return [];
|
||||
}
|
||||
const data = fs.readFileSync(migrationsFile, 'utf-8');
|
||||
return data ? JSON.parse(data) : [];
|
||||
};
|
||||
|
||||
const saveCompletedMigration = (migrationName: string) => {
|
||||
const completedMigrations = getCompletedMigrations();
|
||||
completedMigrations.push(migrationName);
|
||||
fs.writeFileSync(
|
||||
migrationsFile,
|
||||
JSON.stringify(completedMigrations, null, 2)
|
||||
);
|
||||
};
|
||||
|
||||
export const runMigrations = async () => {
|
||||
// if no config file, we assume it's a new installation and no migrations are needed
|
||||
if (!getIsGlobalConfigFileExist()) return;
|
||||
|
||||
const config = getConfig();
|
||||
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
|
||||
|
||||
const completedMigrations = getCompletedMigrations();
|
||||
|
||||
let isMigrated = false;
|
||||
|
||||
for (const migration of migrations) {
|
||||
if (!completedMigrations.includes(migration.name)) {
|
||||
try {
|
||||
console.log('Applying migration', migration.name);
|
||||
migration.run();
|
||||
console.log('Migration applied successfully', migration.name);
|
||||
saveCompletedMigration(migration.name);
|
||||
} catch (error) {
|
||||
outro(
|
||||
`${chalk.red('Failed to apply migration')} ${
|
||||
migration.name
|
||||
}: ${error}`
|
||||
);
|
||||
}
|
||||
|
||||
isMigrated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (isMigrated) {
|
||||
outro(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} Migrations to your config were applied successfully. Please rerun.`
|
||||
);
|
||||
process.exit(0);
|
||||
}
|
||||
};
|
||||
@@ -2,9 +2,9 @@ import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
import { AnthropicEngine } from '../engine/anthropic';
|
||||
import { AzureEngine } from '../engine/azure';
|
||||
import { AiEngine } from '../engine/Engine';
|
||||
import { FlowiseEngine } from '../engine/flowise';
|
||||
import { GeminiEngine } from '../engine/gemini';
|
||||
import { OllamaEngine } from '../engine/ollama';
|
||||
import { FlowiseAi } from '../engine/flowise';
|
||||
import { Gemini } from '../engine/gemini';
|
||||
import { OllamaAi } from '../engine/ollama';
|
||||
import { OpenAiEngine } from '../engine/openAi';
|
||||
import { TestAi, TestMockType } from '../engine/testAi';
|
||||
|
||||
@@ -16,30 +16,50 @@ export function getEngine(): AiEngine {
|
||||
model: config.OCO_MODEL!,
|
||||
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
|
||||
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
|
||||
baseURL: config.OCO_API_URL!,
|
||||
apiKey: config.OCO_API_KEY!
|
||||
baseURL: config.OCO_OPENAI_BASE_PATH!
|
||||
};
|
||||
|
||||
switch (provider) {
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
return new OllamaEngine(DEFAULT_CONFIG);
|
||||
return new OllamaAi({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: '',
|
||||
baseURL: config.OCO_OLLAMA_API_URL!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return new AnthropicEngine(DEFAULT_CONFIG);
|
||||
return new AnthropicEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_ANTHROPIC_API_KEY!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.TEST:
|
||||
return new TestAi(config.OCO_TEST_MOCK_TYPE as TestMockType);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
return new GeminiEngine(DEFAULT_CONFIG);
|
||||
return new Gemini({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_GEMINI_API_KEY!,
|
||||
baseURL: config.OCO_GEMINI_BASE_PATH!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AZURE:
|
||||
return new AzureEngine(DEFAULT_CONFIG);
|
||||
return new AzureEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_AZURE_API_KEY!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.FLOWISE:
|
||||
return new FlowiseEngine(DEFAULT_CONFIG);
|
||||
return new FlowiseAi({
|
||||
...DEFAULT_CONFIG,
|
||||
baseURL: config.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
|
||||
apiKey: config.OCO_FLOWISE_API_KEY!
|
||||
});
|
||||
|
||||
default:
|
||||
return new OpenAiEngine(DEFAULT_CONFIG);
|
||||
return new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_OPENAI_API_KEY!
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,205 +0,0 @@
|
||||
import path from 'path';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { exec } from 'child_process';
|
||||
import { prepareTempDir } from './utils';
|
||||
import { promisify } from 'util';
|
||||
import { render } from 'cli-testing-library';
|
||||
import { resolve } from 'path';
|
||||
import { rm } from 'fs';
|
||||
const fsExec = promisify(exec);
|
||||
const fsRemove = promisify(rm);
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* [no remotes]
|
||||
*/
|
||||
const prepareNoRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
*/
|
||||
const prepareOneRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
* other ../remote2.git (fetch)
|
||||
* other ../remote2.git (push)
|
||||
*/
|
||||
const prepareTwoRemotesGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git init --bare other.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
await fsExec('git remote add other ../other.git', { cwd: gitDir });
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
describe('cli flow to push git branch', () => {
|
||||
it('do nothing when OCO_GITPUSH is set to false', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='false' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Command failed with exit code 1')
|
||||
).not.toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push and cause error when there is no remote', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
|
||||
expect(
|
||||
await findByText('Command failed with exit code 1')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push when one remote is set', async () => {
|
||||
const { gitDir, cleanup } = await prepareOneRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('push when two remotes are set', async () => {
|
||||
const { gitDir, cleanup } = await prepareTwoRemotesGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
});
|
||||
@@ -17,7 +17,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
@@ -46,7 +46,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async
|
||||
|
||||
expect(await findByText('Successfully committed')).toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
@@ -209,7 +209,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
await oco.findByText('Choose a remote to push to')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ export const prepareEnvironment = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
// Create a remote git repository int the temp directory. This is necessary to execute the `git push` command
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
@@ -30,8 +30,4 @@ export const prepareEnvironment = async (): Promise<{
|
||||
}
|
||||
}
|
||||
|
||||
export const prepareTempDir = async(): Promise<string> => {
|
||||
return await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
}
|
||||
|
||||
export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { existsSync, readFileSync, rmSync } from 'fs';
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
DEFAULT_CONFIG,
|
||||
getConfig,
|
||||
setConfig
|
||||
@@ -51,13 +50,14 @@ describe('config', () => {
|
||||
describe('getConfig', () => {
|
||||
it('should prioritize local .env over global .opencommit config', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-3.5-turbo',
|
||||
OCO_LANGUAGE: 'en'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_API_KEY: 'local-key',
|
||||
OCO_OPENAI_API_KEY: 'local-key',
|
||||
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
|
||||
OCO_LANGUAGE: 'fr'
|
||||
});
|
||||
|
||||
@@ -67,21 +67,22 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual('local-key');
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
|
||||
expect(config.OCO_LANGUAGE).toEqual('fr');
|
||||
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
|
||||
});
|
||||
|
||||
it('should fallback to global config when local config is not set', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'de',
|
||||
OCO_DESCRIPTION: 'true'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_API_URL: 'local-api-url'
|
||||
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
@@ -90,8 +91,8 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_API_URL).toEqual('local-api-url');
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
expect(config.OCO_LANGUAGE).toEqual('de');
|
||||
expect(config.OCO_DESCRIPTION).toEqual(true);
|
||||
@@ -123,7 +124,7 @@ describe('config', () => {
|
||||
|
||||
it('should handle empty local config correctly', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'es'
|
||||
});
|
||||
@@ -136,20 +137,20 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
expect(config.OCO_LANGUAGE).toEqual('es');
|
||||
});
|
||||
|
||||
it('should override global config with null values in local .env', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_KEY: 'global-key',
|
||||
OCO_OPENAI_API_KEY: 'global-key',
|
||||
OCO_MODEL: 'gpt-4',
|
||||
OCO_LANGUAGE: 'es'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_API_KEY: 'null'
|
||||
OCO_OPENAI_API_KEY: 'null'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
@@ -158,7 +159,7 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual(null);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual(null);
|
||||
});
|
||||
|
||||
it('should handle empty global config', async () => {
|
||||
@@ -171,7 +172,7 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual(undefined);
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -187,12 +188,12 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key_1']],
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key_1']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent).toContain('OCO_API_KEY=persisted-key_1');
|
||||
expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key_1');
|
||||
Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
|
||||
expect(fileContent).toContain(`${key}=${value}`);
|
||||
});
|
||||
@@ -202,48 +203,42 @@ describe('config', () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
await setConfig(
|
||||
[
|
||||
[CONFIG_KEYS.OCO_API_KEY, 'new-key'],
|
||||
[CONFIG_KEYS.OCO_MODEL, 'gpt-4']
|
||||
['OCO_OPENAI_API_KEY', 'new-key'],
|
||||
['OCO_MODEL', 'gpt-4']
|
||||
],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
expect(config.OCO_API_KEY).toEqual('new-key');
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('new-key');
|
||||
expect(config.OCO_MODEL).toEqual('gpt-4');
|
||||
});
|
||||
|
||||
it('should update existing config values', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_KEY: 'initial-key'
|
||||
OCO_OPENAI_API_KEY: 'initial-key'
|
||||
});
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'updated-key']],
|
||||
[['OCO_OPENAI_API_KEY', 'updated-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
expect(config.OCO_API_KEY).toEqual('updated-key');
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
expect(config.OCO_OPENAI_API_KEY).toEqual('updated-key');
|
||||
});
|
||||
|
||||
it('should handle boolean and numeric values correctly', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
await setConfig(
|
||||
[
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, '8192'],
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION, 'true'],
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT, 'false']
|
||||
['OCO_TOKENS_MAX_INPUT', '8192'],
|
||||
['OCO_DESCRIPTION', 'true'],
|
||||
['OCO_ONE_LINE_COMMIT', 'false']
|
||||
],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
const config = getConfig({ globalPath: globalConfigFile.filePath });
|
||||
expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
|
||||
expect(config.OCO_DESCRIPTION).toEqual(true);
|
||||
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
|
||||
@@ -271,12 +266,12 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent).toContain('OCO_API_KEY=persisted-key');
|
||||
expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key');
|
||||
});
|
||||
|
||||
it('should set multiple configs in a row and keep the changes', async () => {
|
||||
@@ -284,17 +279,14 @@ describe('config', () => {
|
||||
expect(isGlobalConfigFileExist).toBe(false);
|
||||
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
|
||||
[['OCO_OPENAI_API_KEY', 'persisted-key']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent1).toContain('OCO_API_KEY=persisted-key');
|
||||
expect(fileContent1).toContain('OCO_OPENAI_API_KEY=persisted-key');
|
||||
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_MODEL, 'gpt-4']],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
await setConfig([['OCO_MODEL', 'gpt-4']], globalConfigFile.filePath);
|
||||
|
||||
const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent2).toContain('OCO_MODEL=gpt-4');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { GeminiEngine } from '../../src/engine/gemini';
|
||||
import { Gemini } from '../../src/engine/gemini';
|
||||
|
||||
import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
|
||||
import {
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
describe('Gemini', () => {
|
||||
let gemini: GeminiEngine;
|
||||
let gemini: Gemini;
|
||||
let mockConfig: ConfigType;
|
||||
let mockGoogleGenerativeAi: GoogleGenerativeAI;
|
||||
let mockGenerativeModel: GenerativeModel;
|
||||
@@ -20,8 +20,8 @@ describe('Gemini', () => {
|
||||
const mockGemini = () => {
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
gemini = new GeminiEngine({
|
||||
apiKey: mockConfig.OCO_API_KEY,
|
||||
gemini = new Gemini({
|
||||
apiKey: mockConfig.OCO_GEMINI_API_KEY,
|
||||
model: mockConfig.OCO_MODEL
|
||||
});
|
||||
};
|
||||
@@ -45,10 +45,12 @@ describe('Gemini', () => {
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
|
||||
mockConfig.OCO_API_KEY = 'mock-api-key';
|
||||
mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key';
|
||||
mockConfig.OCO_MODEL = 'gemini-1.5-flash';
|
||||
|
||||
mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_API_KEY);
|
||||
mockGoogleGenerativeAi = new GoogleGenerativeAI(
|
||||
mockConfig.OCO_GEMINI_API_KEY
|
||||
);
|
||||
mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
|
||||
model: mockConfig.OCO_MODEL
|
||||
});
|
||||
|
||||