Compare commits


24 Commits

Author SHA1 Message Date
di-sukharev
676c2b20c1 update package-lock.json 2024-08-27 16:58:22 +03:00
di-sukharev
21da102f6d Merge remote-tracking branch 'origin/dev' into refactoring_v1 2024-08-27 16:57:54 +03:00
di-sukharev
6ae7ce2720 chore(config.ts): remove debug console log to clean up the code and improve readability 2024-08-27 16:49:17 +03:00
di-sukharev
e44610fea3 chore(.gitignore): add notes.md to .gitignore to prevent tracking of notes files 2024-08-27 16:49:03 +03:00
di-sukharev
9dcb264420 test(config.test.ts): refactor generateConfig function to accept an object for content to improve readability and maintainability 2024-08-20 21:36:00 +03:00
di-sukharev
dd7fdba94e fix(config.ts): revert OCO_GITPUSH to its original position in the config object for clarity
refactor(config.ts): rename configFromEnv to envConfig for better readability
refactor(gemini.ts): simplify client initialization in the Gemini constructor
test(config.test.ts): add test case to check overriding global config with null values in local .env
test(gemini.test.ts): update AI provider assignment to use OCO_AI_PROVIDER_ENUM for consistency
2024-08-20 21:32:16 +03:00
di-sukharev
5fa12e2d4a feat(config): export OCO_AI_PROVIDER_ENUM to allow external access to AI provider constants
refactor(config): simplify mergeObjects function to improve readability and maintainability
refactor(setConfig): remove unnecessary keysToSet variable to streamline logging
refactor(engine): update switch cases to use OCO_AI_PROVIDER_ENUM for better consistency and clarity
2024-08-20 15:37:41 +03:00
di-sukharev
42a36492ad build 2024-08-20 15:37:33 +03:00
di-sukharev
443d27fc8d chore(docs): mark "Push to git" section in README as deprecated to inform users of upcoming changes
refactor(commit.ts): remove early return for non-pushing users to streamline commit process
refactor(config.ts): add deprecation comments for OCO_GITPUSH to indicate future removal
test(config.test.ts): enhance tests to ensure correct handling of local and global config priorities
test(gemini.test.ts): improve tests for Gemini class to ensure proper functionality and error handling
2024-08-20 15:34:09 +03:00
di-sukharev
04991dd00f fix(engine.ts): include DEFAULT_CONFIG in Gemini and Azure engine instantiation to ensure consistent configuration across engines 2024-08-20 12:58:00 +03:00
di-sukharev
3ded6062c1 fix: remove optional chaining from config access to ensure compatibility and prevent potential runtime errors
refactor(flowise.ts, ollama.ts): update axios client configuration to use a consistent URL format for API requests
fix: update README example to reflect the removal of optional chaining in config access
2024-08-20 12:32:40 +03:00
di-sukharev
f8584e7b78 refactor(engine): rename basePath to baseURL for consistency across interfaces and implementations
fix(engine): update Azure and Flowise engines to use baseURL instead of basePath for API configuration
fix(engine): adjust Ollama engine to handle baseURL and fallback to default URL
style(engine): clean up constructor formatting in OpenAiEngine for better readability
chore(engine): update getEngine function to use baseURL in configuration for all engines
2024-08-20 12:21:13 +03:00
di-sukharev
94faceefd3 remove mb confusing line 2024-08-20 12:06:01 +03:00
di-sukharev
720cd6f9c1 clear readme 2024-08-20 12:05:15 +03:00
di-sukharev
b6a92d557f docs(README.md): update author section and clarify API key storage details
docs(README.md): improve instructions for using OpenCommit CLI and configuration
fix(README.md): correct default model name to gpt-4o-mini in usage examples
fix(package.json): update openai package version to 4.56.0 for compatibility
2024-08-20 12:04:07 +03:00
di-sukharev
71354e4687 feat: add CommandsEnum to define command constants for better maintainability
refactor(generateCommitMessageFromGitDiff): update types for OpenAI messages to improve type safety
fix(commitlint/config): remove optional chaining for OCO_LANGUAGE to ensure proper access
refactor(commitlint/prompts): update types for OpenAI messages to improve type safety
refactor(prompts): update types for OpenAI messages to improve type safety
2024-08-20 12:03:40 +03:00
di-sukharev
8f85ee8f8e refactor(testAi.ts): update import statements to use OpenAI type for better clarity and maintainability
fix(testAi.ts): change parameter type in generateCommitMessage method to align with OpenAI's updated type definitions
2024-08-20 12:01:51 +03:00
di-sukharev
f9103a3c6a build 2024-08-20 12:01:38 +03:00
di-sukharev
4afd7de7a8 feat(commands): add COMMANDS enum to standardize command names across the application
refactor(commit.ts): restructure generateCommitMessageFromGitDiff function to use an interface for parameters and improve readability
fix(config.ts): update DEFAULT_TOKEN_LIMITS to correct values for max tokens input and output
chore(config.ts): enhance config validation to handle undefined and null values more effectively
style(commit.ts): improve formatting and consistency in the commit confirmation logic
style(config.ts): clean up error messages and improve clarity in config setting process
2024-08-20 12:01:14 +03:00
di-sukharev
5cfa3cded2 feat(engine): refactor AI engine interfaces and implementations to support multiple AI providers and improve configurability
- Introduce `AiEngineConfig` interface for consistent configuration across AI engines.
- Update `generateCommitMessage` method signatures to use `OpenAIClient.Chat.Completions.ChatCompletionMessageParam`.
- Implement specific configurations for each AI provider (Anthropic, Azure, Gemini, Ollama, OpenAI) to enhance flexibility.
- Replace hardcoded values with configurable parameters for model, API key, and token limits.
- Refactor client initialization to use Axios instances for better HTTP request handling.
- Remove deprecated code and improve error handling for better user feedback.
2024-08-20 11:58:19 +03:00
di-sukharev
bb0b0e804e build 2024-08-20 11:56:44 +03:00
di-sukharev
5d87cc514b feat(ENUMS.ts): add ENUMS file to centralize command constants
refactor(commitlint.ts): update import path to use ENUMS for command constants
refactor(config.ts): update import path to use ENUMS for command constants
refactor(githook.ts): update import path to use ENUMS for command constants
fix(prompts.ts): correct conventional commit keywords instruction text
2024-08-19 14:09:27 +03:00
di-sukharev
6f4e8fde93 docs(README.md): update usage examples to remove redundant 'opencommit' command
chore(example.txt): remove unused example.txt file
fix(config.ts): correct import order and improve validation messages
fix(githook.ts): improve error message for unsupported mode
fix(azure.ts): add non-null assertion for message content
fix(gemini.ts): use strict equality for role comparison
refactor(generateCommitMessageFromGitDiff.ts): reorder imports for consistency
refactor(github-action.ts): reorder imports for consistency
refactor(prompts.ts): simplify prompt content generation and improve readability
style(engine.ts): fix inconsistent spacing and import order
2024-08-19 14:00:08 +03:00
di-sukharev
745bb5218f update imports 2024-08-19 13:09:46 +03:00
30 changed files with 946 additions and 5751 deletions

View File

@@ -28,19 +28,30 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys) or other supported LLM providers (we support them all). Make sure that you add your OpenAI payment details to your account, so the API works.
Alternatively run it via `npx opencommit` or `bunx opencommit`
MacOS may ask to run the command with `sudo` when installing a package globally.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
3. Set the key to OpenCommit config:
```sh
oco config set OCO_API_KEY=<your_api_key>
oco config set OCO_OPENAI_API_KEY=<your_api_key>
```
Your API key is stored locally in the `~/.opencommit` config file.
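For illustration only, that stored file is a plain key=value config; after the command above it might look roughly like this (hypothetical values):
```env
OCO_API_KEY=sk-<your_api_key>
OCO_MODEL=gpt-4o-mini
```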
## Usage
You can call OpenCommit with `oco` command to generate a commit message for your staged changes:
You can call OpenCommit directly to generate a commit message for your staged changes:
```sh
git add <files...>
opencommit
```
You can also use the `oco` shortcut:
```sh
git add <files...>
@@ -59,17 +70,21 @@ You can also run it with local model through ollama:
```sh
git add <files...>
oco config set OCO_AI_PROVIDER='ollama' OCO_MODEL='llama3:8b'
oco config set OCO_AI_PROVIDER='ollama'
```
Default model is `mistral`.
If you want to use a model other than mistral (default), you can do so by setting the `OCO_AI_PROVIDER` environment variable as follows:
```sh
oco config set OCO_AI_PROVIDER='ollama/llama3:8b'
```
If you have Ollama set up in Docker or on another machine with GPUs (not locally), you can change the default endpoint URL.
You can do so by setting the `OCO_API_URL` environment variable as follows:
You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
```sh
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
oco config set OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat'
```
where `192.168.1.10` is an example of the endpoint URL where you have Ollama set up.
@@ -106,21 +121,22 @@ Create a `.env` file and add OpenCommit config variables there like this:
```env
...
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any string basically, but it should be a valid model name>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama or ollama/model>
...
```
Global configs are the same as local configs, but they are stored in the global `~/.opencommit` config file and set with the `oco config set` command, e.g. `oco config set OCO_MODEL=gpt-4o`.
These are not all the config options, but you get the point.
### Global config for all repos
@@ -146,16 +162,6 @@ oco config set OCO_EMOJI=false
Other config options behave the same way.
### Output WHY the changes were done (WIP)
You can set the `OCO_WHY` config to `true` to have OpenCommit output a short description of WHY the changes were done after the commit message. Default is `false`.
To make this accurate, we must store 'what files do' in some kind of index or embedding and perform a lookup (a RAG-like approach) to produce an accurate git commit message. If you feel like building this, comment on this ticket https://github.com/di-sukharev/opencommit/issues/398 and let's go from there together.
```sh
oco config set OCO_WHY=true
```
### Switch to GPT-4 or other models
By default, OpenCommit uses the `gpt-4o-mini` model.
@@ -172,26 +178,26 @@ or for as a cheaper option:
oco config set OCO_MODEL=gpt-3.5-turbo
```
### Switch to other LLM providers with a custom URL
### Switch to Azure OpenAI
By default OpenCommit uses [OpenAI](https://openai.com).
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/) or Flowise or Ollama.
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀
```sh
oco config set OCO_AI_PROVIDER=azure OCO_API_KEY=<your_azure_api_key> OCO_API_URL=<your_azure_endpoint>
oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_API_URL=<your_flowise_endpoint>
oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
opencommit config set OCO_AI_PROVIDER=azure
```
Of course, you need to set 'OCO_OPENAI_API_KEY'. You also need to set 'OCO_OPENAI_BASE_PATH' to your Azure endpoint and set your deployment name as the 'model'.
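As a minimal sketch, with illustrative values and the key names mentioned above (note that `config.ts` in this diff also defines Azure-specific keys, `OCO_AZURE_API_KEY` and `OCO_AZURE_ENDPOINT`):
```sh
oco config set OCO_AI_PROVIDER=azure
oco config set OCO_OPENAI_API_KEY=<your_azure_api_key>
oco config set OCO_OPENAI_BASE_PATH=<your_azure_endpoint>
oco config set OCO_MODEL=<your_deployment_name>
```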
### Locale configuration
To globally specify the language used to generate commit messages:
```sh
# de, German, Deutsch
# de, German ,Deutsch
oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch
@@ -207,14 +213,12 @@ All available languages are currently listed in the [i18n](https://github.com/di
### Push to git (gonna be deprecated)
A prompt for pushing to git is on by default but if you would like to turn it off just use:
A prompt to ushing to git is on by default but if you would like to turn it off just use:
```sh
oco config set OCO_GITPUSH=false
```
and it will exit right after commit is confirmed without asking if you would like to push to remote.
### Switch to `@commitlint`
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to the `@commitlint` prompt module if you prefer. This option lets you generate commit messages that respect your local commitlint configuration, as shown in the sketch below.
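A minimal sketch of switching the prompt module and back (the `OCO_PROMPT_MODULE` key and its two values are listed in the config reference above):
```sh
oco config set OCO_PROMPT_MODULE=@commitlint
# to switch back to the default:
oco config set OCO_PROMPT_MODULE=conventional-commit
```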
@@ -389,7 +393,7 @@ jobs:
# set openAI api key in repo actions secrets,
# for openAI keys go to: https://platform.openai.com/account/api-keys
# for repo secret go to: <your_repo_url>/settings/secrets/actions
OCO_API_KEY: ${{ secrets.OCO_API_KEY }}
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
# customization
OCO_TOKENS_MAX_INPUT: 4096

File diff suppressed because it is too large

File diff suppressed because it is too large

package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "opencommit",
"version": "3.1.2",
"version": "3.0.20",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "opencommit",
"version": "3.1.2",
"version": "3.0.20",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",
@@ -28,7 +28,7 @@
"ignore": "^5.2.4",
"ini": "^3.0.1",
"inquirer": "^9.1.4",
"openai": "^4.57.0"
"openai": "^4.56.0"
},
"bin": {
"oco": "out/cli.cjs",
@@ -2098,11 +2098,6 @@
"form-data": "^4.0.0"
}
},
"node_modules/@types/qs": {
"version": "6.9.15",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz",
"integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg=="
},
"node_modules/@types/semver": {
"version": "7.5.8",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
@@ -7224,17 +7219,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/object-inspect": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
"integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -7258,19 +7242,17 @@
}
},
"node_modules/openai": {
"version": "4.57.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.57.0.tgz",
"integrity": "sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==",
"version": "4.56.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.56.0.tgz",
"integrity": "sha512-zcag97+3bG890MNNa0DQD9dGmmTWL8unJdNkulZzWRXrl+QeD+YkBI4H58rJcwErxqGK6a0jVPZ4ReJjhDGcmw==",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"@types/qs": "^6.9.7",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7",
"qs": "^6.10.3"
"node-fetch": "^2.6.7"
},
"bin": {
"openai": "bin/cli"
@@ -7722,20 +7704,6 @@
}
]
},
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"dependencies": {
"side-channel": "^1.0.6"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/queue-microtask": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -8082,23 +8050,6 @@
"node": ">=8"
}
},
"node_modules/side-channel": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"dependencies": {
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.1.2",
"version": "3.0.20",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -88,6 +88,7 @@
"@google/generative-ai": "^0.11.4",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",
@@ -96,6 +97,6 @@
"ignore": "^5.2.4",
"ini": "^3.0.1",
"inquirer": "^9.1.4",
"openai": "^4.57.0"
"openai": "^4.56.0"
}
}

View File

@@ -9,7 +9,6 @@ import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { runMigrations } from './migrations/_run.js';
const extraArgs = process.argv.slice(2);
@@ -31,7 +30,6 @@ cli(
help: { description: packageJSON.description }
},
async ({ flags }) => {
await runMigrations();
await checkIsLatestVersion();
if (await isHookCalled()) {

View File

@@ -50,8 +50,8 @@ const generateCommitMessageFromGitDiff = async ({
skipCommitConfirmation = false
}: GenerateCommitMessageFromGitDiffParams): Promise<void> => {
await assertGitRepo();
const commitGenerationSpinner = spinner();
commitGenerationSpinner.start('Generating the commit message');
const commitSpinner = spinner();
commitSpinner.start('Generating the commit message');
try {
let commitMessage = await generateCommitMessageByDiff(
@@ -73,7 +73,7 @@ const generateCommitMessageFromGitDiff = async ({
);
}
commitGenerationSpinner.stop('📝 Commit message generated');
commitSpinner.stop('📝 Commit message generated');
outro(
`Generated commit message:
@@ -88,42 +88,32 @@ ${chalk.grey('——————————————————')}`
message: 'Confirm the commit message?'
}));
if (isCancel(isCommitConfirmedByUser)) process.exit(1);
if (isCommitConfirmedByUser) {
const committingChangesSpinner = spinner();
committingChangesSpinner.start('Committing the changes');
if (isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
const { stdout } = await execa('git', [
'commit',
'-m',
commitMessage,
...extraArgs
]);
committingChangesSpinner.stop(
`${chalk.green('✔')} Successfully committed`
);
outro(`${chalk.green('✔')} Successfully committed`);
outro(stdout);
const remotes = await getGitRemotes();
// user isn't pushing, return early
if (config.OCO_GITPUSH === false) return;
if (!remotes.length) {
const { stdout } = await execa('git', ['push']);
if (stdout) outro(stdout);
process.exit(0);
}
if (remotes.length === 1) {
if (remotes.length === 1 && config.OCO_GITPUSH !== true) {
const isPushConfirmedByUser = await confirm({
message: 'Do you want to run `git push`?'
});
if (isCancel(isPushConfirmedByUser)) process.exit(1);
if (isPushConfirmedByUser) {
if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${remotes[0]}'`);
@@ -151,30 +141,28 @@ ${chalk.grey('——————————————————')}`
options: remotes.map((remote) => ({ value: remote, label: remote }))
})) as string;
if (isCancel(selectedRemote)) process.exit(1);
if (!isCancel(selectedRemote)) {
const pushSpinner = spinner();
const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
const { stdout } = await execa('git', ['push', selectedRemote]);
const { stdout } = await execa('git', ['push', selectedRemote]);
pushSpinner.stop(
`${chalk.green(
'✔'
)} Successfully pushed all commits to ${selectedRemote}`
);
if (stdout) outro(stdout);
pushSpinner.stop(
`${chalk.green(
'✔'
)} successfully pushed all commits to ${selectedRemote}`
);
if (stdout) outro(stdout);
} else outro(`${chalk.gray('✖')} process cancelled`);
}
} else {
}
if (!isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
const regenerateMessage = await confirm({
message: 'Do you want to regenerate the message?'
});
if (isCancel(regenerateMessage)) process.exit(1);
if (regenerateMessage) {
if (regenerateMessage && !isCancel(isCommitConfirmedByUser)) {
await generateCommitMessageFromGitDiff({
diff,
extraArgs,
@@ -183,7 +171,7 @@ ${chalk.grey('——————————————————')}`
}
}
} catch (error) {
commitGenerationSpinner.stop('📝 Commit message generated');
commitSpinner.stop('📝 Commit message generated');
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);
@@ -231,9 +219,10 @@ export async function commit(
message: 'Do you want to stage all files and generate commit message?'
});
if (isCancel(isStageAllAndCommitConfirmedByUser)) process.exit(1);
if (isStageAllAndCommitConfirmedByUser) {
if (
isStageAllAndCommitConfirmedByUser &&
!isCancel(isStageAllAndCommitConfirmedByUser)
) {
await commit(extraArgs, true, fullGitMojiSpec);
process.exit(1);
}

View File

@@ -23,7 +23,7 @@ export const commitlintConfigCommand = command(
if (mode === CONFIG_MODES.get) {
const commitLintConfig = await getCommitlintLLMConfig();
outro(JSON.stringify(commitLintConfig, null, 2));
outro(commitLintConfig.toString());
return;
}

View File

@@ -11,21 +11,29 @@ import { TEST_MOCK_TYPES } from '../engine/testAi';
import { getI18nLocal, i18n } from '../i18n';
export enum CONFIG_KEYS {
OCO_API_KEY = 'OCO_API_KEY',
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
OCO_EMOJI = 'OCO_EMOJI',
OCO_MODEL = 'OCO_MODEL',
OCO_LANGUAGE = 'OCO_LANGUAGE',
OCO_WHY = 'OCO_WHY',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
OCO_API_URL = 'OCO_API_URL',
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
}
export enum CONFIG_MODES {
@@ -114,19 +122,65 @@ const validateConfig = (
};
export const configValidators = {
[CONFIG_KEYS.OCO_API_KEY](value: any, config: any = {}) {
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'openai') return value;
validateConfig(
'OCO_API_KEY',
'OCO_OPENAI_API_KEY',
typeof value === 'string' && value.length > 0,
'Empty value is not allowed'
);
validateConfig(
'OCO_API_KEY',
'OCO_OPENAI_API_KEY',
value,
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'azure') return value;
validateConfig(
'OCO_AZURE_API_KEY',
!!value,
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_GEMINI_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'gemini') return value;
validateConfig(
'OCO_GEMINI_API_KEY',
value || config.OCO_GEMINI_API_KEY || config.OCO_AI_PROVIDER === 'test',
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'anthropic') return value;
validateConfig(
'ANTHROPIC_API_KEY',
!!value,
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
validateConfig(
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
value || config.OCO_AI_PROVIDER !== 'flowise',
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
);
return value;
@@ -186,11 +240,11 @@ export const configValidators = {
return getI18nLocal(value);
},
[CONFIG_KEYS.OCO_API_URL](value: any) {
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH](value: any) {
validateConfig(
CONFIG_KEYS.OCO_API_URL,
CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
typeof value === 'string',
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
'Must be string'
);
return value;
},
@@ -260,6 +314,26 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
value.includes('openai.azure.com'),
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
typeof value === 'string' && value.includes(':'),
'Value must be string and should include both I.P. and port number' // Considering the possibility of DNS lookup or feeding the I.P. explicitly, there is no pattern to verify, except a colon for the port number
);
return value;
},
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
validateConfig(
CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
@@ -271,11 +345,11 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_WHY](value: any) {
[CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) {
validateConfig(
CONFIG_KEYS.OCO_WHY,
typeof value === 'boolean',
'Must be true or false'
CONFIG_KEYS.OCO_OLLAMA_API_URL,
typeof value === 'string' && value.startsWith('http'),
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
);
return value;
}
@@ -292,13 +366,16 @@ export enum OCO_AI_PROVIDER_ENUM {
}
export type ConfigType = {
[CONFIG_KEYS.OCO_API_KEY]?: string;
[CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string;
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string;
[CONFIG_KEYS.OCO_AZURE_API_KEY]?: string;
[CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string;
[CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string;
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
[CONFIG_KEYS.OCO_API_URL]?: string;
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
[CONFIG_KEYS.OCO_EMOJI]: boolean;
[CONFIG_KEYS.OCO_WHY]: boolean;
[CONFIG_KEYS.OCO_MODEL]: string;
[CONFIG_KEYS.OCO_LANGUAGE]: string;
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER]: string;
@@ -306,11 +383,16 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
[CONFIG_KEYS.OCO_GITPUSH]: boolean;
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
[CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string;
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
[CONFIG_KEYS.OCO_API_URL]?: string;
[CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string;
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string;
[CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string;
};
export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
export const defaultEnvPath = pathResolve(process.cwd(), '.env');
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
const defaultEnvPath = pathResolve(process.cwd(), '.env');
const assertConfigsAreValid = (config: Record<string, any>) => {
for (const [key, value] of Object.entries(config)) {
@@ -340,29 +422,28 @@ enum OCO_PROMPT_MODULE_ENUM {
COMMITLINT = '@commitlint'
}
export const DEFAULT_CONFIG = {
OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT,
OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT,
OCO_DESCRIPTION: false,
OCO_EMOJI: false,
OCO_MODEL: getDefaultModel('openai'),
OCO_LANGUAGE: 'en',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg',
OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT,
OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
OCO_ONE_LINE_COMMIT: false,
OCO_TEST_MOCK_TYPE: 'commit-message',
OCO_FLOWISE_ENDPOINT: ':',
OCO_WHY: false,
OCO_GITPUSH: true // todo: deprecate
const initGlobalConfig = () => {
const defaultConfig = {
OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT,
OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT,
OCO_DESCRIPTION: false,
OCO_EMOJI: false,
OCO_MODEL: getDefaultModel('openai'),
OCO_LANGUAGE: 'en',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg',
OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT,
OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
OCO_ONE_LINE_COMMIT: false,
OCO_TEST_MOCK_TYPE: 'commit-message',
OCO_FLOWISE_ENDPOINT: ':',
OCO_GITPUSH: true // todo: deprecate
};
writeFileSync(defaultConfigPath, iniStringify(defaultConfig), 'utf8');
return defaultConfig;
};
const initGlobalConfig = (configPath: string = defaultConfigPath) => {
writeFileSync(configPath, iniStringify(DEFAULT_CONFIG), 'utf8');
return DEFAULT_CONFIG;
};
const parseConfigVarValue = (value?: any) => {
const parseEnvVarValue = (value?: any) => {
try {
return JSON.parse(value);
} catch (error) {
@@ -370,101 +451,73 @@ const parseConfigVarValue = (value?: any) => {
}
};
const getEnvConfig = (envPath: string) => {
export const getConfig = ({
configPath = defaultConfigPath,
envPath = defaultEnvPath
}: {
configPath?: string;
envPath?: string;
} = {}): ConfigType => {
dotenv.config({ path: envPath });
return {
const envConfig = {
OCO_MODEL: process.env.OCO_MODEL,
OCO_API_URL: process.env.OCO_API_URL,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue(
process.env.OCO_TOKENS_MAX_OUTPUT
),
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH) // todo: deprecate
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
};
};
export const setGlobalConfig = (
config: ConfigType,
configPath: string = defaultConfigPath
) => {
writeFileSync(configPath, iniStringify(config), 'utf8');
};
export const getIsGlobalConfigFileExist = (
configPath: string = defaultConfigPath
) => {
return existsSync(configPath);
};
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
let globalConfig: ConfigType;
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
const isGlobalConfigFileExist = existsSync(configPath);
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig();
else {
const configFile = readFileSync(configPath, 'utf8');
globalConfig = iniParse(configFile) as ConfigType;
}
return globalConfig;
};
const mergeObjects = (main: Partial<ConfigType>, fallback: ConfigType) =>
Object.keys(CONFIG_KEYS).reduce((acc, key) => {
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
/**
* Merges two configs.
* Env config takes precedence over global ~/.opencommit config file
* @param main - env config
* @param fallback - global ~/.opencommit config file
* @returns merged config
*/
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => {
const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]);
return Array.from(allKeys).reduce((acc, key) => {
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
return acc;
}, {} as ConfigType);
};
return acc;
}, {} as ConfigType);
interface GetConfigOptions {
globalPath?: string;
envPath?: string;
setDefaultValues?: boolean;
}
export const getConfig = ({
envPath = defaultEnvPath,
globalPath = defaultConfigPath
}: GetConfigOptions = {}): ConfigType => {
const envConfig = getEnvConfig(envPath);
const globalConfig = getGlobalConfig(globalPath);
const config = mergeConfigs(envConfig, globalConfig);
// env config takes precedence over global ~/.opencommit config file
const config = mergeObjects(envConfig, globalConfig);
return config;
};
export const setConfig = (
keyValues: [key: string, value: string | boolean | number | null][],
globalConfigPath: string = defaultConfigPath
keyValues: [key: string, value: string][],
configPath: string = defaultConfigPath
) => {
const config = getConfig({
globalPath: globalConfigPath
});
const configToSet = {};
const config = getConfig();
for (let [key, value] of keyValues) {
if (!configValidators.hasOwnProperty(key)) {
@@ -477,8 +530,7 @@ export const setConfig = (
let parsedConfigValue;
try {
if (typeof value === 'string') parsedConfigValue = JSON.parse(value);
else parsedConfigValue = value;
parsedConfigValue = JSON.parse(value);
} catch (error) {
parsedConfigValue = value;
}
@@ -488,10 +540,12 @@ export const setConfig = (
config
);
configToSet[key] = validValue;
config[key] = validValue;
}
setGlobalConfig(mergeConfigs(configToSet, config), globalConfigPath);
writeFileSync(configPath, iniStringify(config), 'utf8');
assertConfigsAreValid(config);
outro(`${chalk.green('✔')} config successfully set`);
};

View File

@@ -39,11 +39,14 @@ export const prepareCommitMessageHook = async (
const config = getConfig();
if (!config.OCO_API_KEY) {
outro(
'No OCO_API_KEY is set. Set your key via `oco config set OCO_API_KEY=<value>. For more info see https://github.com/di-sukharev/opencommit'
if (
!config.OCO_OPENAI_API_KEY &&
!config.OCO_ANTHROPIC_API_KEY &&
!config.OCO_AZURE_API_KEY
) {
throw new Error(
'No OPEN_AI_API or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key in ~/.opencommit'
);
return;
}
const spin = spinner();

View File

@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
interface FlowiseAiConfig extends AiEngineConfig {}
export class FlowiseEngine implements AiEngine {
export class FlowiseAi implements AiEngine {
config: FlowiseAiConfig;
client: AxiosInstance;

View File

@@ -11,7 +11,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
interface GeminiConfig extends AiEngineConfig {}
export class GeminiEngine implements AiEngine {
export class Gemini implements AiEngine {
config: GeminiConfig;
client: GoogleGenerativeAI;

View File

@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
interface OllamaConfig extends AiEngineConfig {}
export class OllamaEngine implements AiEngine {
export class OllamaAi implements AiEngine {
config: OllamaConfig;
client: AxiosInstance;
@@ -28,10 +28,7 @@ export class OllamaEngine implements AiEngine {
stream: false
};
try {
const response = await this.client.post(
this.client.getUri(this.config),
params
);
const response = await this.client.post('', params);
const message = response.data.message;

View File

@@ -6,8 +6,11 @@ import { mergeDiffs } from './utils/mergeDiffs';
import { tokenCount } from './utils/tokenCount';
const config = getConfig();
const MAX_TOKENS_INPUT = config.OCO_TOKENS_MAX_INPUT;
const MAX_TOKENS_OUTPUT = config.OCO_TOKENS_MAX_OUTPUT;
const MAX_TOKENS_INPUT =
config.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
const MAX_TOKENS_OUTPUT =
config.OCO_TOKENS_MAX_OUTPUT ||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const generateCommitMessageChatCompletionPrompt = async (
diff: string,

View File

@@ -1,45 +0,0 @@
import {
CONFIG_KEYS,
getConfig,
OCO_AI_PROVIDER_ENUM,
setConfig
} from '../commands/config';
export default function () {
const config = getConfig({ setDefaultValues: false });
const aiProvider = config.OCO_AI_PROVIDER;
let apiKey: string | undefined;
let apiUrl: string | undefined;
if (aiProvider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
apiKey = config['OCO_OLLAMA_API_KEY'];
apiUrl = config['OCO_OLLAMA_API_URL'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.ANTHROPIC) {
apiKey = config['OCO_ANTHROPIC_API_KEY'];
apiUrl = config['OCO_ANTHROPIC_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.OPENAI) {
apiKey = config['OCO_OPENAI_API_KEY'];
apiUrl = config['OCO_OPENAI_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.AZURE) {
apiKey = config['OCO_AZURE_API_KEY'];
apiUrl = config['OCO_AZURE_ENDPOINT'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.GEMINI) {
apiKey = config['OCO_GEMINI_API_KEY'];
apiUrl = config['OCO_GEMINI_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.FLOWISE) {
apiKey = config['OCO_FLOWISE_API_KEY'];
apiUrl = config['OCO_FLOWISE_ENDPOINT'];
} else {
throw new Error(
`Migration failed, set AI provider first. Run "oco config set OCO_AI_PROVIDER=<provider>", where <provider> is one of: ${Object.values(
OCO_AI_PROVIDER_ENUM
).join(', ')}`
);
}
if (apiKey) setConfig([[CONFIG_KEYS.OCO_API_KEY, apiKey]]);
if (apiUrl) setConfig([[CONFIG_KEYS.OCO_API_URL, apiUrl]]);
}

View File

@@ -1,26 +0,0 @@
import { getGlobalConfig, setGlobalConfig } from '../commands/config';
export default function () {
const obsoleteKeys = [
'OCO_OLLAMA_API_KEY',
'OCO_OLLAMA_API_URL',
'OCO_ANTHROPIC_API_KEY',
'OCO_ANTHROPIC_BASE_PATH',
'OCO_OPENAI_API_KEY',
'OCO_OPENAI_BASE_PATH',
'OCO_AZURE_API_KEY',
'OCO_AZURE_ENDPOINT',
'OCO_GEMINI_API_KEY',
'OCO_GEMINI_BASE_PATH',
'OCO_FLOWISE_API_KEY',
'OCO_FLOWISE_ENDPOINT'
];
const globalConfig = getGlobalConfig();
const configToOverride = { ...globalConfig };
for (const key of obsoleteKeys) delete configToOverride[key];
setGlobalConfig(configToOverride);
}

View File

@@ -1,20 +0,0 @@
import {
ConfigType,
DEFAULT_CONFIG,
getGlobalConfig,
setConfig
} from '../commands/config';
export default function () {
const setDefaultConfigValues = (config: ConfigType) => {
const entriesToSet: [key: string, value: string | boolean | number][] = [];
for (const entry of Object.entries(DEFAULT_CONFIG)) {
const [key, _value] = entry;
if (config[key] === 'undefined') entriesToSet.push(entry);
}
if (entriesToSet.length > 0) setConfig(entriesToSet);
};
setDefaultConfigValues(getGlobalConfig());
}

View File

@@ -1,18 +0,0 @@
import migration00 from './00_use_single_api_key_and_url';
import migration01 from './01_remove_obsolete_config_keys_from_global_file';
import migration02 from './02_set_missing_default_values';
export const migrations = [
{
name: '00_use_single_api_key_and_url',
run: migration00
},
{
name: '01_remove_obsolete_config_keys_from_global_file',
run: migration01
},
{
name: '02_set_missing_default_values',
run: migration02
}
];

View File

@@ -1,70 +0,0 @@
import fs from 'fs';
import { homedir } from 'os';
import { join as pathJoin } from 'path';
import { migrations } from './_migrations';
import { outro } from '@clack/prompts';
import chalk from 'chalk';
import {
getConfig,
getIsGlobalConfigFileExist,
OCO_AI_PROVIDER_ENUM
} from '../commands/config';
const migrationsFile = pathJoin(homedir(), '.opencommit_migrations');
const getCompletedMigrations = (): string[] => {
if (!fs.existsSync(migrationsFile)) {
return [];
}
const data = fs.readFileSync(migrationsFile, 'utf-8');
return data ? JSON.parse(data) : [];
};
const saveCompletedMigration = (migrationName: string) => {
const completedMigrations = getCompletedMigrations();
completedMigrations.push(migrationName);
fs.writeFileSync(
migrationsFile,
JSON.stringify(completedMigrations, null, 2)
);
};
export const runMigrations = async () => {
// if no config file, we assume it's a new installation and no migrations are needed
if (!getIsGlobalConfigFileExist()) return;
const config = getConfig();
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
const completedMigrations = getCompletedMigrations();
let isMigrated = false;
for (const migration of migrations) {
if (!completedMigrations.includes(migration.name)) {
try {
console.log('Applying migration', migration.name);
migration.run();
console.log('Migration applied successfully', migration.name);
saveCompletedMigration(migration.name);
} catch (error) {
outro(
`${chalk.red('Failed to apply migration')} ${
migration.name
}: ${error}`
);
}
isMigrated = true;
}
}
if (isMigrated) {
outro(
`${chalk.green(
'✔'
)} Migrations to your config were applied successfully. Please rerun.`
);
process.exit(0);
}
};

View File

@@ -258,9 +258,7 @@ const INIT_MAIN_PROMPT = (
prompts: string[]
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
role: 'system',
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${
config.OCO_WHY ? 'and WHY the changes were done' : ''
}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${
config.OCO_EMOJI
? 'Use GitMoji convention to preface the commit.'

View File

@@ -1,29 +1,13 @@
import fs from 'fs/promises';
import path from 'path';
const findModulePath = (moduleName: string) => {
const searchPaths = [
path.join('node_modules', moduleName),
path.join('node_modules', '.pnpm')
];
for (const basePath of searchPaths) {
try {
const resolvedPath = require.resolve(moduleName, { paths: [basePath] });
return resolvedPath;
} catch {
// Continue to the next search path if the module is not found
}
}
throw new Error(`Cannot find module ${moduleName}`);
};
const getCommitLintModuleType = async (): Promise<'cjs' | 'esm'> => {
const packageFile = '@commitlint/load/package.json';
const packageJsonPath = findModulePath(packageFile);
const packageFile = 'node_modules/@commitlint/load/package.json';
const packageJsonPath = path.join(
process.env.PWD || process.cwd(),
packageFile,
);
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
if (!packageJson) {
throw new Error(`Failed to parse ${packageFile}`);
}
@@ -35,7 +19,7 @@ const getCommitLintModuleType = async (): Promise<'cjs' | 'esm'> => {
* QualifiedConfig from any version of @commitlint/types
* @see https://github.com/conventional-changelog/commitlint/blob/master/@commitlint/types/src/load.ts
*/
type QualifiedConfigOnAnyVersion = { [key: string]: unknown };
type QualifiedConfigOnAnyVersion = { [key:string]: unknown };
/**
* This code is loading the configuration for the `@commitlint` package from the current working
@@ -43,31 +27,36 @@ type QualifiedConfigOnAnyVersion = { [key: string]: unknown };
*
* @returns
*/
export const getCommitLintPWDConfig =
async (): Promise<QualifiedConfigOnAnyVersion | null> => {
let load: Function, modulePath: string;
switch (await getCommitLintModuleType()) {
case 'cjs':
/**
* CommonJS (<= commitlint@v18.x.x.)
*/
modulePath = findModulePath('@commitlint/load');
load = require(modulePath).default;
break;
case 'esm':
/**
* ES Module (commitlint@v19.x.x. <= )
* Directory import is not supported in ES Module resolution, so import the file directly
*/
modulePath = await findModulePath('@commitlint/load/lib/load.js');
load = (await import(modulePath)).default;
break;
}
export const getCommitLintPWDConfig = async (): Promise<QualifiedConfigOnAnyVersion | null> => {
let load, nodeModulesPath;
switch (await getCommitLintModuleType()) {
case 'cjs':
/**
* CommonJS (<= commitlint@v18.x.x.)
*/
nodeModulesPath = path.join(
process.env.PWD || process.cwd(),
'node_modules/@commitlint/load',
);
load = require(nodeModulesPath).default;
break;
case 'esm':
/**
* ES Module (commitlint@v19.x.x. <= )
* Directory import is not supported in ES Module resolution, so import the file directly
*/
nodeModulesPath = path.join(
process.env.PWD || process.cwd(),
'node_modules/@commitlint/load/lib/load.js',
);
load = (await import(nodeModulesPath)).default;
break;
}
if (load && typeof load === 'function') {
return await load();
}
if (load && typeof load === 'function') {
return await load();
}
// @commitlint/load is not a function
return null;
};
// @commitlint/load is not a function
return null;
};

View File

@@ -2,9 +2,9 @@ import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';
import { AnthropicEngine } from '../engine/anthropic';
import { AzureEngine } from '../engine/azure';
import { AiEngine } from '../engine/Engine';
import { FlowiseEngine } from '../engine/flowise';
import { GeminiEngine } from '../engine/gemini';
import { OllamaEngine } from '../engine/ollama';
import { FlowiseAi } from '../engine/flowise';
import { Gemini } from '../engine/gemini';
import { OllamaAi } from '../engine/ollama';
import { OpenAiEngine } from '../engine/openAi';
import { TestAi, TestMockType } from '../engine/testAi';
@@ -16,30 +16,50 @@ export function getEngine(): AiEngine {
model: config.OCO_MODEL!,
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
baseURL: config.OCO_API_URL!,
apiKey: config.OCO_API_KEY!
baseURL: config.OCO_OPENAI_BASE_PATH!
};
switch (provider) {
case OCO_AI_PROVIDER_ENUM.OLLAMA:
return new OllamaEngine(DEFAULT_CONFIG);
return new OllamaAi({
...DEFAULT_CONFIG,
apiKey: '',
baseURL: config.OCO_OLLAMA_API_URL!
});
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
return new AnthropicEngine(DEFAULT_CONFIG);
return new AnthropicEngine({
...DEFAULT_CONFIG,
apiKey: config.OCO_ANTHROPIC_API_KEY!
});
case OCO_AI_PROVIDER_ENUM.TEST:
return new TestAi(config.OCO_TEST_MOCK_TYPE as TestMockType);
case OCO_AI_PROVIDER_ENUM.GEMINI:
return new GeminiEngine(DEFAULT_CONFIG);
return new Gemini({
...DEFAULT_CONFIG,
apiKey: config.OCO_GEMINI_API_KEY!,
baseURL: config.OCO_GEMINI_BASE_PATH!
});
case OCO_AI_PROVIDER_ENUM.AZURE:
return new AzureEngine(DEFAULT_CONFIG);
return new AzureEngine({
...DEFAULT_CONFIG,
apiKey: config.OCO_AZURE_API_KEY!
});
case OCO_AI_PROVIDER_ENUM.FLOWISE:
return new FlowiseEngine(DEFAULT_CONFIG);
return new FlowiseAi({
...DEFAULT_CONFIG,
baseURL: config.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
apiKey: config.OCO_FLOWISE_API_KEY!
});
default:
return new OpenAiEngine(DEFAULT_CONFIG);
return new OpenAiEngine({
...DEFAULT_CONFIG,
apiKey: config.OCO_OPENAI_API_KEY!
});
}
}

View File

@@ -1,205 +0,0 @@
import path from 'path';
import 'cli-testing-library/extend-expect';
import { exec } from 'child_process';
import { prepareTempDir } from './utils';
import { promisify } from 'util';
import { render } from 'cli-testing-library';
import { resolve } from 'path';
import { rm } from 'fs';
const fsExec = promisify(exec);
const fsRemove = promisify(rm);
/**
* git remote -v
*
* [no remotes]
*/
const prepareNoRemoteGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
/**
* git remote -v
*
* origin /tmp/remote.git (fetch)
* origin /tmp/remote.git (push)
*/
const prepareOneRemoteGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
/**
* git remote -v
*
* origin /tmp/remote.git (fetch)
* origin /tmp/remote.git (push)
* other ../remote2.git (fetch)
* other ../remote2.git (push)
*/
const prepareTwoRemotesGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git init --bare other.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
await fsExec('git remote add other ../other.git', { cwd: gitDir });
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
describe('cli flow to push git branch', () => {
it('do nothing when OCO_GITPUSH is set to false', async () => {
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' OCO_GITPUSH='false' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await queryByText('Choose a remote to push to')
).not.toBeInTheConsole();
expect(
await queryByText('Do you want to run `git push`?')
).not.toBeInTheConsole();
expect(
await queryByText('Successfully pushed all commits to origin')
).not.toBeInTheConsole();
expect(
await queryByText('Command failed with exit code 1')
).not.toBeInTheConsole();
await cleanup();
});
it('push and cause error when there is no remote', async () => {
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await queryByText('Choose a remote to push to')
).not.toBeInTheConsole();
expect(
await queryByText('Do you want to run `git push`?')
).not.toBeInTheConsole();
expect(
await queryByText('Successfully pushed all commits to origin')
).not.toBeInTheConsole();
expect(
await findByText('Command failed with exit code 1')
).toBeInTheConsole();
await cleanup();
});
it('push when one remote is set', async () => {
const { gitDir, cleanup } = await prepareOneRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Do you want to run `git push`?')
).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Successfully pushed all commits to origin')
).toBeInTheConsole();
await cleanup();
});
it('push when two remotes are set', async () => {
const { gitDir, cleanup } = await prepareTwoRemotesGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Successfully pushed all commits to origin')
).toBeInTheConsole();
await cleanup();
});
});

View File

@@ -17,7 +17,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
@@ -46,7 +46,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async
expect(await findByText('Successfully committed')).toBeInTheConsole();
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();

View File

@@ -181,7 +181,9 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
[],
{ cwd: gitDir }
);
expect(await commitlintGet.findByText('consistency')).toBeInTheConsole();
expect(
await commitlintGet.findByText('[object Object]')
).toBeInTheConsole();
// Run 'oco' using .opencommit-commitlint
await render('echo', [`'console.log("Hello World");' > index.ts`], {
@@ -209,7 +211,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
oco.userEvent.keyboard('[Enter]');
expect(
await oco.findByText('Do you want to run `git push`?')
await oco.findByText('Choose a remote to push to')
).toBeInTheConsole();
oco.userEvent.keyboard('[Enter]');

View File

@@ -15,7 +15,7 @@ export const prepareEnvironment = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
// Create a remote git repository in the temp directory. This is necessary to execute the `git push` command
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
@@ -30,8 +30,4 @@ export const prepareEnvironment = async (): Promise<{
}
}
export const prepareTempDir = async(): Promise<string> => {
return await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
}
export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

View File

@@ -1,17 +1,10 @@
import { existsSync, readFileSync, rmSync } from 'fs';
import {
CONFIG_KEYS,
DEFAULT_CONFIG,
getConfig,
setConfig
} from '../../src/commands/config';
import { getConfig } from '../../src/commands/config';
import { prepareFile } from './utils';
import { dirname } from 'path';
describe('config', () => {
describe('getConfig', () => {
const originalEnv = { ...process.env };
let globalConfigFile: { filePath: string; cleanup: () => Promise<void> };
let envConfigFile: { filePath: string; cleanup: () => Promise<void> };
let localEnvFile: { filePath: string; cleanup: () => Promise<void> };
function resetEnv(env: NodeJS.ProcessEnv) {
Object.keys(process.env).forEach((key) => {
@@ -26,12 +19,7 @@ describe('config', () => {
beforeEach(async () => {
resetEnv(originalEnv);
if (globalConfigFile) await globalConfigFile.cleanup();
if (envConfigFile) await envConfigFile.cleanup();
});
afterEach(async () => {
if (globalConfigFile) await globalConfigFile.cleanup();
if (envConfigFile) await envConfigFile.cleanup();
if (localEnvFile) await localEnvFile.cleanup();
});
afterAll(() => {
@@ -48,256 +36,115 @@ describe('config', () => {
return await prepareFile(fileName, fileContent);
};
describe('getConfig', () => {
it('should prioritize local .env over global .opencommit config', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_KEY: 'global-key',
OCO_MODEL: 'gpt-3.5-turbo',
OCO_LANGUAGE: 'en'
});
envConfigFile = await generateConfig('.env', {
OCO_API_KEY: 'local-key',
OCO_LANGUAGE: 'fr'
});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_KEY).toEqual('local-key');
expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
expect(config.OCO_LANGUAGE).toEqual('fr');
it('should prioritize local .env over global .opencommit config', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_OPENAI_API_KEY: 'global-key',
OCO_MODEL: 'gpt-3.5-turbo',
OCO_LANGUAGE: 'en'
});
it('should fallback to global config when local config is not set', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'de',
OCO_DESCRIPTION: 'true'
});
envConfigFile = await generateConfig('.env', {
OCO_API_URL: 'local-api-url'
});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_KEY).toEqual('global-key');
expect(config.OCO_API_URL).toEqual('local-api-url');
expect(config.OCO_MODEL).toEqual('gpt-4');
expect(config.OCO_LANGUAGE).toEqual('de');
expect(config.OCO_DESCRIPTION).toEqual(true);
localEnvFile = await generateConfig('.env', {
OCO_OPENAI_API_KEY: 'local-key',
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
OCO_LANGUAGE: 'fr'
});
it('should handle boolean and numeric values correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_TOKENS_MAX_INPUT: '4096',
OCO_TOKENS_MAX_OUTPUT: '500',
OCO_GITPUSH: 'true'
});
envConfigFile = await generateConfig('.env', {
OCO_TOKENS_MAX_INPUT: '8192',
OCO_ONE_LINE_COMMIT: 'false'
});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
expect(config.OCO_GITPUSH).toEqual(true);
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
const config = getConfig({
configPath: globalConfigFile.filePath,
envPath: localEnvFile.filePath
});
it('should handle empty local config correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'es'
});
envConfigFile = await generateConfig('.env', {});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_KEY).toEqual('global-key');
expect(config.OCO_MODEL).toEqual('gpt-4');
expect(config.OCO_LANGUAGE).toEqual('es');
});
it('should override global config with null values in local .env', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'es'
});
envConfigFile = await generateConfig('.env', {
OCO_API_KEY: 'null'
});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_KEY).toEqual(null);
});
it('should handle empty global config', async () => {
globalConfigFile = await generateConfig('.opencommit', {});
envConfigFile = await generateConfig('.env', {});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_KEY).toEqual(undefined);
});
expect(config).not.toEqual(null);
expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
expect(config.OCO_LANGUAGE).toEqual('fr');
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
});
describe('setConfig', () => {
beforeEach(async () => {
    // create and then delete the file so the parent directory exists but the file does not, letting the test verify that setConfig creates it
globalConfigFile = await generateConfig('.opencommit', {});
rmSync(globalConfigFile.filePath);
it('should fallback to global config when local config is not set', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_OPENAI_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'de',
OCO_DESCRIPTION: 'true'
});
it('should create .opencommit file with DEFAULT CONFIG if it does not exist on first setConfig run', async () => {
const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
expect(isGlobalConfigFileExist).toBe(false);
await setConfig(
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key_1']],
globalConfigFile.filePath
);
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
expect(fileContent).toContain('OCO_API_KEY=persisted-key_1');
Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
expect(fileContent).toContain(`${key}=${value}`);
});
localEnvFile = await generateConfig('.env', {
OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
});
it('should set new config values', async () => {
globalConfigFile = await generateConfig('.opencommit', {});
await setConfig(
[
[CONFIG_KEYS.OCO_API_KEY, 'new-key'],
[CONFIG_KEYS.OCO_MODEL, 'gpt-4']
],
globalConfigFile.filePath
);
const config = getConfig({
globalPath: globalConfigFile.filePath
});
expect(config.OCO_API_KEY).toEqual('new-key');
expect(config.OCO_MODEL).toEqual('gpt-4');
const config = getConfig({
configPath: globalConfigFile.filePath,
envPath: localEnvFile.filePath
});
it('should update existing config values', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_KEY: 'initial-key'
});
await setConfig(
[[CONFIG_KEYS.OCO_API_KEY, 'updated-key']],
globalConfigFile.filePath
);
expect(config).not.toEqual(null);
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
expect(config.OCO_MODEL).toEqual('gpt-4');
expect(config.OCO_LANGUAGE).toEqual('de');
expect(config.OCO_DESCRIPTION).toEqual(true);
});
const config = getConfig({
globalPath: globalConfigFile.filePath
});
expect(config.OCO_API_KEY).toEqual('updated-key');
it('should handle boolean and numeric values correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_TOKENS_MAX_INPUT: '4096',
OCO_TOKENS_MAX_OUTPUT: '500',
OCO_GITPUSH: 'true'
});
it('should handle boolean and numeric values correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {});
await setConfig(
[
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, '8192'],
[CONFIG_KEYS.OCO_DESCRIPTION, 'true'],
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT, 'false']
],
globalConfigFile.filePath
);
const config = getConfig({
globalPath: globalConfigFile.filePath
});
expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
expect(config.OCO_DESCRIPTION).toEqual(true);
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
localEnvFile = await generateConfig('.env', {
OCO_TOKENS_MAX_INPUT: '8192',
OCO_ONE_LINE_COMMIT: 'false'
});
it('should throw an error for unsupported config keys', async () => {
globalConfigFile = await generateConfig('.opencommit', {});
try {
await setConfig(
[['UNSUPPORTED_KEY', 'value']],
globalConfigFile.filePath
);
throw new Error('NEVER_REACHED');
} catch (error) {
expect(error.message).toContain(
'Unsupported config key: UNSUPPORTED_KEY'
);
expect(error.message).not.toContain('NEVER_REACHED');
}
const config = getConfig({
configPath: globalConfigFile.filePath,
envPath: localEnvFile.filePath
});
it('should persist changes to the config file', async () => {
const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
expect(isGlobalConfigFileExist).toBe(false);
expect(config).not.toEqual(null);
expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
expect(config.OCO_GITPUSH).toEqual(true);
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
});
await setConfig(
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
globalConfigFile.filePath
);
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
expect(fileContent).toContain('OCO_API_KEY=persisted-key');
it('should handle empty local config correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_OPENAI_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'es'
});
it('should set multiple configs in a row and keep the changes', async () => {
const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
expect(isGlobalConfigFileExist).toBe(false);
localEnvFile = await generateConfig('.env', {});
await setConfig(
[[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
globalConfigFile.filePath
);
const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
expect(fileContent1).toContain('OCO_API_KEY=persisted-key');
await setConfig(
[[CONFIG_KEYS.OCO_MODEL, 'gpt-4']],
globalConfigFile.filePath
);
const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
expect(fileContent2).toContain('OCO_MODEL=gpt-4');
const config = getConfig({
configPath: globalConfigFile.filePath,
envPath: localEnvFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
expect(config.OCO_MODEL).toEqual('gpt-4');
expect(config.OCO_LANGUAGE).toEqual('es');
});
it('should override global config with null values in local .env', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_OPENAI_API_KEY: 'global-key',
OCO_MODEL: 'gpt-4',
OCO_LANGUAGE: 'es'
});
localEnvFile = await generateConfig('.env', { OCO_OPENAI_API_KEY: 'null' });
const config = getConfig({
configPath: globalConfigFile.filePath,
envPath: localEnvFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_OPENAI_API_KEY).toEqual(null);
});
});
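Taken together, these assertions define the precedence rules: the local `.env` wins over the global `.opencommit`, the literal string 'null' becomes `null`, and boolean/numeric strings are coerced to typed values. A minimal sketch of that behaviour, not the actual `getConfig` in `src/commands/config` (the use of the `dotenv` package and the `parseValueSketch`/`getConfigSketch` names are illustrative assumptions):

import { existsSync, readFileSync } from 'fs';
import { parse } from 'dotenv';

type ParsedValue = string | number | boolean | null | undefined;

// Coerce raw string values into the types the tests assert on.
const parseValueSketch = (value: string | undefined): ParsedValue => {
  if (value === undefined) return undefined;
  if (value === 'null') return null;
  if (value === 'true') return true;
  if (value === 'false') return false;
  const asNumber = Number(value);
  return value.trim() !== '' && !Number.isNaN(asNumber) ? asNumber : value;
};

const readEnvFile = (filePath: string): Record<string, string> =>
  existsSync(filePath) ? parse(readFileSync(filePath)) : {};

// Local .env entries take priority over global .opencommit entries;
// keys missing from the local file fall back to the global values.
const getConfigSketch = ({
  configPath,
  envPath
}: {
  configPath: string;
  envPath: string;
}): Record<string, ParsedValue> => {
  const merged = { ...readEnvFile(configPath), ...readEnvFile(envPath) };
  return Object.fromEntries(
    Object.entries(merged).map(([key, value]) => [key, parseValueSketch(value)])
  );
};

Run against the fixtures from the first test, this sketch would yield OCO_OPENAI_API_KEY 'local-key', OCO_MODEL 'gpt-3.5-turbo', OCO_LANGUAGE 'fr', and OCO_ANTHROPIC_API_KEY 'local-anthropic-key', matching the expectations above.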

View File

@@ -1,4 +1,4 @@
import { GeminiEngine } from '../../src/engine/gemini';
import { Gemini } from '../../src/engine/gemini';
import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
import {
@@ -9,7 +9,7 @@ import {
import { OpenAI } from 'openai';
describe('Gemini', () => {
let gemini: GeminiEngine;
let gemini: Gemini;
let mockConfig: ConfigType;
let mockGoogleGenerativeAi: GoogleGenerativeAI;
let mockGenerativeModel: GenerativeModel;
@@ -20,8 +20,8 @@ describe('Gemini', () => {
const mockGemini = () => {
mockConfig = getConfig() as ConfigType;
gemini = new GeminiEngine({
apiKey: mockConfig.OCO_API_KEY,
gemini = new Gemini({
apiKey: mockConfig.OCO_GEMINI_API_KEY,
model: mockConfig.OCO_MODEL
});
};
@@ -45,10 +45,12 @@ describe('Gemini', () => {
mockConfig = getConfig() as ConfigType;
mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
mockConfig.OCO_API_KEY = 'mock-api-key';
mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key';
mockConfig.OCO_MODEL = 'gemini-1.5-flash';
mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_API_KEY);
mockGoogleGenerativeAi = new GoogleGenerativeAI(
mockConfig.OCO_GEMINI_API_KEY
);
mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
model: mockConfig.OCO_MODEL
});

View File

@@ -1,7 +1,7 @@
import { existsSync, mkdtemp, rm, writeFile } from 'fs';
import { tmpdir } from 'os';
import path from 'path';
import { mkdtemp, rm, writeFile } from 'fs';
import { promisify } from 'util';
import { tmpdir } from 'os';
const fsMakeTempDir = promisify(mkdtemp);
const fsRemove = promisify(rm);
const fsWriteFile = promisify(writeFile);
@@ -20,9 +20,7 @@ export async function prepareFile(
const filePath = path.resolve(tempDir, fileName);
await fsWriteFile(filePath, content);
const cleanup = async () => {
if (existsSync(tempDir)) {
await fsRemove(tempDir, { recursive: true });
}
return fsRemove(tempDir, { recursive: true });
};
return {

View File

@@ -4,7 +4,7 @@
"lib": ["ES6", "ES2020"],
"module": "CommonJS",
"resolveJsonModule": true,
"moduleResolution": "Node",
@@ -21,7 +21,9 @@
"skipLibCheck": true
},
"include": ["test/jest-setup.ts"],
"include": [
"test/jest-setup.ts"
],
"exclude": ["node_modules"],
"ts-node": {
"esm": true,