Compare commits

..

26 Commits

Author SHA1 Message Date
di-sukharev
ca4be719b2 Merge branch 'dev' into oco_find_v1 2024-09-01 18:26:46 +03:00
di-sukharev
5e37fd29b7 Merge remote-tracking branch 'origin/dev' into oco_find_v1 2024-09-01 18:24:48 +03:00
di-sukharev
7286456a04 Merge remote-tracking branch 'origin/dev' into oco_find_v1 2024-08-27 17:09:44 +03:00
di-sukharev
85468823f9 feat(package.json): add uglify-js dependency for JavaScript minification
feat(find.ts): implement functions to find declarations and usages of functions,
generate call hierarchy, and create mermaid diagrams for better visualization of code structure
refactor(find.ts): improve findInFiles function to accept options for grep
and enhance the handling of occurrences for better clarity and usability
2024-08-27 16:46:27 +03:00
di-sukharev
7eb9a1b45c rename azure method 2024-08-25 22:34:22 +03:00
di-sukharev
825c2fe825 feat(commands): remove CommandsEnum.ts and integrate commands into ENUMS.ts for better organization
feat(cli): add findCommand to the CLI for enhanced functionality in searching
fix(commitlint): correct capitalization in intro message for consistency
fix(prepare-commit-msg-hook): correct capitalization in intro message for consistency
refactor(utils): rename getOpenCommitIgnore to getIgnoredFolders for clarity and improve ignored folder retrieval logic
2024-08-24 20:17:16 +03:00
di-sukharev
9dcb264420 test(config.test.ts): refactor generateConfig function to accept an object for content to improve readability and maintainability 2024-08-20 21:36:00 +03:00
di-sukharev
dd7fdba94e fix(config.ts): revert OCO_GITPUSH to its original position in the config object for clarity
refactor(config.ts): rename configFromEnv to envConfig for better readability
refactor(gemini.ts): simplify client initialization in the Gemini constructor
test(config.test.ts): add test case to check overriding global config with null values in local .env
test(gemini.test.ts): update AI provider assignment to use OCO_AI_PROVIDER_ENUM for consistency
2024-08-20 21:32:16 +03:00
di-sukharev
5fa12e2d4a feat(config): export OCO_AI_PROVIDER_ENUM to allow external access to AI provider constants
refactor(config): simplify mergeObjects function to improve readability and maintainability
refactor(setConfig): remove unnecessary keysToSet variable to streamline logging
refactor(engine): update switch cases to use OCO_AI_PROVIDER_ENUM for better consistency and clarity
2024-08-20 15:37:41 +03:00
di-sukharev
42a36492ad build 2024-08-20 15:37:33 +03:00
di-sukharev
443d27fc8d chore(docs): mark "Push to git" section in README as deprecated to inform users of upcoming changes
refactor(commit.ts): remove early return for non-pushing users to streamline commit process
refactor(config.ts): add deprecation comments for OCO_GITPUSH to indicate future removal
test(config.test.ts): enhance tests to ensure correct handling of local and global config priorities
test(gemini.test.ts): improve tests for Gemini class to ensure proper functionality and error handling
2024-08-20 15:34:09 +03:00
di-sukharev
04991dd00f fix(engine.ts): include DEFAULT_CONFIG in Gemini and Azure engine instantiation to ensure consistent configuration across engines 2024-08-20 12:58:00 +03:00
di-sukharev
3ded6062c1 fix: remove optional chaining from config access to ensure compatibility and prevent potential runtime errors
refactor(flowise.ts, ollama.ts): update axios client configuration to use a consistent URL format for API requests
fix: update README example to reflect the removal of optional chaining in config access
2024-08-20 12:32:40 +03:00
di-sukharev
f8584e7b78 refactor(engine): rename basePath to baseURL for consistency across interfaces and implementations
fix(engine): update Azure and Flowise engines to use baseURL instead of basePath for API configuration
fix(engine): adjust Ollama engine to handle baseURL and fallback to default URL
style(engine): clean up constructor formatting in OpenAiEngine for better readability
chore(engine): update getEngine function to use baseURL in configuration for all engines
2024-08-20 12:21:13 +03:00
di-sukharev
94faceefd3 remove mb confusing line 2024-08-20 12:06:01 +03:00
di-sukharev
720cd6f9c1 clear readme 2024-08-20 12:05:15 +03:00
di-sukharev
b6a92d557f docs(README.md): update author section and clarify API key storage details
docs(README.md): improve instructions for using OpenCommit CLI and configuration
fix(README.md): correct default model name to gpt-4o-mini in usage examples
fix(package.json): update openai package version to 4.56.0 for compatibility
2024-08-20 12:04:07 +03:00
di-sukharev
71354e4687 feat: add CommandsEnum to define command constants for better maintainability
refactor(generateCommitMessageFromGitDiff): update types for OpenAI messages to improve type safety
fix(commitlint/config): remove optional chaining for OCO_LANGUAGE to ensure proper access
refactor(commitlint/prompts): update types for OpenAI messages to improve type safety
refactor(prompts): update types for OpenAI messages to improve type safety
2024-08-20 12:03:40 +03:00
di-sukharev
8f85ee8f8e refactor(testAi.ts): update import statements to use OpenAI type for better clarity and maintainability
fix(testAi.ts): change parameter type in generateCommitMessage method to align with OpenAI's updated type definitions
2024-08-20 12:01:51 +03:00
di-sukharev
f9103a3c6a build 2024-08-20 12:01:38 +03:00
di-sukharev
4afd7de7a8 feat(commands): add COMMANDS enum to standardize command names across the application
refactor(commit.ts): restructure generateCommitMessageFromGitDiff function to use an interface for parameters and improve readability
fix(config.ts): update DEFAULT_TOKEN_LIMITS to correct values for max tokens input and output
chore(config.ts): enhance config validation to handle undefined and null values more effectively
style(commit.ts): improve formatting and consistency in the commit confirmation logic
style(config.ts): clean up error messages and improve clarity in config setting process
2024-08-20 12:01:14 +03:00
di-sukharev
5cfa3cded2 feat(engine): refactor AI engine interfaces and implementations to support multiple AI providers and improve configurability
- Introduce `AiEngineConfig` interface for consistent configuration across AI engines.
- Update `generateCommitMessage` method signatures to use `OpenAIClient.Chat.Completions.ChatCompletionMessageParam`.
- Implement specific configurations for each AI provider (Anthropic, Azure, Gemini, Ollama, OpenAI) to enhance flexibility.
- Replace hardcoded values with configurable parameters for model, API key, and token limits.
- Refactor client initialization to use Axios instances for better HTTP request handling.
- Remove deprecated code and improve error handling for better user feedback.
2024-08-20 11:58:19 +03:00
di-sukharev
bb0b0e804e build 2024-08-20 11:56:44 +03:00
di-sukharev
5d87cc514b feat(ENUMS.ts): add ENUMS file to centralize command constants
refactor(commitlint.ts): update import path to use ENUMS for command constants
refactor(config.ts): update import path to use ENUMS for command constants
refactor(githook.ts): update import path to use ENUMS for command constants
fix(prompts.ts): correct conventional commit keywords instruction text
2024-08-19 14:09:27 +03:00
di-sukharev
6f4e8fde93 docs(README.md): update usage examples to remove redundant 'opencommit' command
chore(example.txt): remove unused example.txt file
fix(config.ts): correct import order and improve validation messages
fix(githook.ts): improve error message for unsupported mode
fix(azure.ts): add non-null assertion for message content
fix(gemini.ts): use strict equality for role comparison
refactor(generateCommitMessageFromGitDiff.ts): reorder imports for consistency
refactor(github-action.ts): reorder imports for consistency
refactor(prompts.ts): simplify prompt content generation and improve readability
style(engine.ts): fix inconsistent spacing and import order
2024-08-19 14:00:08 +03:00
di-sukharev
745bb5218f update imports 2024-08-19 13:09:46 +03:00
32 changed files with 1373 additions and 5667 deletions

4
.gitignore vendored

@@ -11,4 +11,6 @@ uncaughtExceptions.log
src/*.json src/*.json
.idea .idea
test.ts test.ts
notes.md notes.md
*.excalidraw
*.tldr

README.md

@@ -28,19 +28,28 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit npm install -g opencommit
``` ```
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys) or other supported LLM providers (we support them all). Make sure that you add your OpenAI payment details to your account, so the API works. Alternatively run it via `npx opencommit` or `bunx opencommit`, but you need to create ~/.opencommit config file in place.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
3. Set the key to OpenCommit config: 3. Set the key to OpenCommit config:
```sh ```sh
oco config set OCO_API_KEY=<your_api_key> oco config set OCO_OPENAI_API_KEY=<your_api_key>
``` ```
Your API key is stored locally in the `~/.opencommit` config file. Your API key is stored locally in the `~/.opencommit` config file.
## Usage ## Usage
You can call OpenCommit with `oco` command to generate a commit message for your staged changes: You can call OpenCommit directly to generate a commit message for your staged changes:
```sh
git add <files...>
opencommit
```
You can also use the `oco` shortcut:
```sh ```sh
git add <files...> git add <files...>
@@ -59,17 +68,21 @@ You can also run it with local model through ollama:
```sh ```sh
git add <files...> git add <files...>
oco config set OCO_AI_PROVIDER='ollama' OCO_MODEL='llama3:8b' oco config set OCO_AI_PROVIDER='ollama'
``` ```
Default model is `mistral`. If you want to use a model other than mistral (default), you can do so by setting the `OCO_AI_PROVIDER` environment variable as follows:
```sh
oco config set OCO_AI_PROVIDER='ollama/llama3:8b'
```
If you have ollama that is set up in docker/ on another machine with GPUs (not locally), you can change the default endpoint url. If you have ollama that is set up in docker/ on another machine with GPUs (not locally), you can change the default endpoint url.
You can do so by setting the `OCO_API_URL` environment variable as follows: You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
```sh ```sh
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat' oco config set OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat'
``` ```
where 192.168.1.10 is example of endpoint URL, where you have ollama set up. where 192.168.1.10 is example of endpoint URL, where you have ollama set up.
@@ -106,21 +119,22 @@ Create a `.env` file and add OpenCommit config variables there like this:
```env ```env
... ...
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise> OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)> OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)> OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes> OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji> OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name> OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any string basically, but it should be a valid model name>
OCO_LANGUAGE=<locale, scroll to the bottom to see options> OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'> OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit> OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_ONE_LINE_COMMIT=<one line commit message, default: false> OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama or ollama/model>
...
``` ```
Global configs are same as local configs, but they are stored in the global `~/.opencommit` config file and set with `oco config set` command, e.g. `oco config set OCO_MODEL=gpt-4o`. This are not all the config options, but you get the point.
### Global config for all repos ### Global config for all repos
@@ -172,26 +186,26 @@ or for as a cheaper option:
oco config set OCO_MODEL=gpt-3.5-turbo oco config set OCO_MODEL=gpt-3.5-turbo
``` ```
### Switch to other LLM providers with a custom URL ### Switch to Azure OpenAI
By default OpenCommit uses [OpenAI](https://openai.com). By default OpenCommit uses [OpenAI](https://openai.com).
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/) or Flowise or Ollama. You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀
```sh ```sh
oco config set OCO_AI_PROVIDER=azure OCO_API_KEY=<your_azure_api_key> OCO_API_URL=<your_azure_endpoint> opencommit config set OCO_AI_PROVIDER=azure
oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_API_URL=<your_flowise_endpoint>
oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
``` ```
Of course need to set 'OCO_OPENAI_API_KEY'. And also need to set the
'OPENAI_BASE_PATH' for the endpoint and set the deployment name to
'model'.
### Locale configuration ### Locale configuration
To globally specify the language used to generate commit messages: To globally specify the language used to generate commit messages:
```sh ```sh
# de, German, Deutsch # de, German ,Deutsch
oco config set OCO_LANGUAGE=de oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch oco config set OCO_LANGUAGE=Deutsch
@@ -207,14 +221,12 @@ All available languages are currently listed in the [i18n](https://github.com/di
### Push to git (gonna be deprecated) ### Push to git (gonna be deprecated)
A prompt for pushing to git is on by default but if you would like to turn it off just use: A prompt to ushing to git is on by default but if you would like to turn it off just use:
```sh ```sh
oco config set OCO_GITPUSH=false oco config set OCO_GITPUSH=false
``` ```
and it will exit right after commit is confirmed without asking if you would like to push to remote.
### Switch to `@commitlint` ### Switch to `@commitlint`
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages in respect with the local config. OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages in respect with the local config.
@@ -389,7 +401,7 @@ jobs:
# set openAI api key in repo actions secrets, # set openAI api key in repo actions secrets,
# for openAI keys go to: https://platform.openai.com/account/api-keys # for openAI keys go to: https://platform.openai.com/account/api-keys
# for repo secret go to: <your_repo_url>/settings/secrets/actions # for repo secret go to: <your_repo_url>/settings/secrets/actions
OCO_API_KEY: ${{ secrets.OCO_API_KEY }} OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
# customization # customization
OCO_TOKENS_MAX_INPUT: 4096 OCO_TOKENS_MAX_INPUT: 4096

File diff suppressed because it is too large.

File diff suppressed because it is too large.

399
package-lock.json generated

@@ -1,12 +1,12 @@
{ {
"name": "opencommit", "name": "opencommit",
"version": "3.1.2", "version": "3.1.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "opencommit", "name": "opencommit",
"version": "3.1.2", "version": "3.1.1",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
@@ -28,7 +28,7 @@
"ignore": "^5.2.4", "ignore": "^5.2.4",
"ini": "^3.0.1", "ini": "^3.0.1",
"inquirer": "^9.1.4", "inquirer": "^9.1.4",
"openai": "^4.57.0" "openai": "^4.56.0"
}, },
"bin": { "bin": {
"oco": "out/cli.cjs", "oco": "out/cli.cjs",
@@ -956,38 +956,6 @@
"resolved": "https://registry.npmjs.org/@dqbd/tiktoken/-/tiktoken-1.0.13.tgz", "resolved": "https://registry.npmjs.org/@dqbd/tiktoken/-/tiktoken-1.0.13.tgz",
"integrity": "sha512-941kjlHjfI97l6NuH/AwuXV4mHuVnRooDcHNSlzi98hz+4ug3wT4gJcWjSwSZHqeGAEn90lC9sFD+8a9d5Jvxg==" "integrity": "sha512-941kjlHjfI97l6NuH/AwuXV4mHuVnRooDcHNSlzi98hz+4ug3wT4gJcWjSwSZHqeGAEn90lC9sFD+8a9d5Jvxg=="
}, },
"node_modules/@esbuild/android-arm": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.15.18.tgz",
"integrity": "sha512-5GT+kcs2WVGjVs7+boataCkO5Fg0y4kCjzkB5bAip7H4jfnOS3dA6KPiww9W1OEKTKeAcUVhdZGvgI65OXmUnw==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.15.18.tgz",
"integrity": "sha512-L4jVKS82XVhw2nvzLg/19ClLWg0y27ulRwuP7lcyL6AbUWB5aPglXY3M21mauDQMDfRLs8cQmeT03r/+X3cZYQ==",
"cpu": [
"loong64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/@eslint-community/eslint-utils": { "node_modules/@eslint-community/eslint-utils": {
"version": "4.4.0", "version": "4.4.0",
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
@@ -2098,11 +2066,6 @@
"form-data": "^4.0.0" "form-data": "^4.0.0"
} }
}, },
"node_modules/@types/qs": {
"version": "6.9.15",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.15.tgz",
"integrity": "sha512-uXHQKES6DQKKCLh441Xv/dwxOq1TVS3JPUMlEqoEglvlhR6Mxnlew/Xq/LRVHpLyk7iK3zODe1qYHIMltO7XGg=="
},
"node_modules/@types/semver": { "node_modules/@types/semver": {
"version": "7.5.8", "version": "7.5.8",
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz",
@@ -3683,54 +3646,6 @@
"esbuild-windows-arm64": "0.15.18" "esbuild-windows-arm64": "0.15.18"
} }
}, },
"node_modules/esbuild-android-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.15.18.tgz",
"integrity": "sha512-wnpt3OXRhcjfIDSZu9bnzT4/TNTDsOUvip0foZOUBG7QbSt//w3QV4FInVJxNhKc/ErhUxc5z4QjHtMi7/TbgA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-android-arm64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.15.18.tgz",
"integrity": "sha512-G4xu89B8FCzav9XU8EjsXacCKSG2FT7wW9J6hOc18soEHJdtWu03L3TQDGf0geNxfLTtxENKBzMSq9LlbjS8OQ==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-darwin-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.15.18.tgz",
"integrity": "sha512-2WAvs95uPnVJPuYKP0Eqx+Dl/jaYseZEUUT1sjg97TJa4oBtbAKnPnl3b5M9l51/nbx7+QAEtuummJZW0sBEmg==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-darwin-arm64": { "node_modules/esbuild-darwin-arm64": {
"version": "0.15.18", "version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.15.18.tgz", "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.15.18.tgz",
@@ -3747,262 +3662,6 @@
"node": ">=12" "node": ">=12"
} }
}, },
"node_modules/esbuild-freebsd-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.15.18.tgz",
"integrity": "sha512-TT3uBUxkteAjR1QbsmvSsjpKjOX6UkCstr8nMr+q7zi3NuZ1oIpa8U41Y8I8dJH2fJgdC3Dj3CXO5biLQpfdZA==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-freebsd-arm64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.15.18.tgz",
"integrity": "sha512-R/oVr+X3Tkh+S0+tL41wRMbdWtpWB8hEAMsOXDumSSa6qJR89U0S/PpLXrGF7Wk/JykfpWNokERUpCeHDl47wA==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-32": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.15.18.tgz",
"integrity": "sha512-lphF3HiCSYtaa9p1DtXndiQEeQDKPl9eN/XNoBf2amEghugNuqXNZA/ZovthNE2aa4EN43WroO0B85xVSjYkbg==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.15.18.tgz",
"integrity": "sha512-hNSeP97IviD7oxLKFuii5sDPJ+QHeiFTFLoLm7NZQligur8poNOWGIgpQ7Qf8Balb69hptMZzyOBIPtY09GZYw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-arm": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.15.18.tgz",
"integrity": "sha512-UH779gstRblS4aoS2qpMl3wjg7U0j+ygu3GjIeTonCcN79ZvpPee12Qun3vcdxX+37O5LFxz39XeW2I9bybMVA==",
"cpu": [
"arm"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-arm64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.15.18.tgz",
"integrity": "sha512-54qr8kg/6ilcxd+0V3h9rjT4qmjc0CccMVWrjOEM/pEcUzt8X62HfBSeZfT2ECpM7104mk4yfQXkosY8Quptug==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-mips64le": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.15.18.tgz",
"integrity": "sha512-Mk6Ppwzzz3YbMl/ZZL2P0q1tnYqh/trYZ1VfNP47C31yT0K8t9s7Z077QrDA/guU60tGNp2GOwCQnp+DYv7bxQ==",
"cpu": [
"mips64el"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-ppc64le": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.15.18.tgz",
"integrity": "sha512-b0XkN4pL9WUulPTa/VKHx2wLCgvIAbgwABGnKMY19WhKZPT+8BxhZdqz6EgkqCLld7X5qiCY2F/bfpUUlnFZ9w==",
"cpu": [
"ppc64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-riscv64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.15.18.tgz",
"integrity": "sha512-ba2COaoF5wL6VLZWn04k+ACZjZ6NYniMSQStodFKH/Pu6RxzQqzsmjR1t9QC89VYJxBeyVPTaHuBMCejl3O/xg==",
"cpu": [
"riscv64"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-linux-s390x": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.15.18.tgz",
"integrity": "sha512-VbpGuXEl5FCs1wDVp93O8UIzl3ZrglgnSQ+Hu79g7hZu6te6/YHgVJxCM2SqfIila0J3k0csfnf8VD2W7u2kzQ==",
"cpu": [
"s390x"
],
"dev": true,
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-netbsd-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.15.18.tgz",
"integrity": "sha512-98ukeCdvdX7wr1vUYQzKo4kQ0N2p27H7I11maINv73fVEXt2kyh4K4m9f35U1K43Xc2QGXlzAw0K9yoU7JUjOg==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-openbsd-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.15.18.tgz",
"integrity": "sha512-yK5NCcH31Uae076AyQAXeJzt/vxIo9+omZRKj1pauhk3ITuADzuOx5N2fdHrAKPxN+zH3w96uFKlY7yIn490xQ==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-sunos-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.15.18.tgz",
"integrity": "sha512-On22LLFlBeLNj/YF3FT+cXcyKPEI263nflYlAhz5crxtp3yRG1Ugfr7ITyxmCmjm4vbN/dGrb/B7w7U8yJR9yw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-windows-32": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.15.18.tgz",
"integrity": "sha512-o+eyLu2MjVny/nt+E0uPnBxYuJHBvho8vWsC2lV61A7wwTWC3jkN2w36jtA+yv1UgYkHRihPuQsL23hsCYGcOQ==",
"cpu": [
"ia32"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-windows-64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.15.18.tgz",
"integrity": "sha512-qinug1iTTaIIrCorAUjR0fcBk24fjzEedFYhhispP8Oc7SFvs+XeW3YpAKiKp8dRpizl4YYAhxMjlftAMJiaUw==",
"cpu": [
"x64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/esbuild-windows-arm64": {
"version": "0.15.18",
"resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.15.18.tgz",
"integrity": "sha512-q9bsYzegpZcLziq0zgUi5KqGVtfhjxGbnksaBFYmWLxeV/S1fK4OLdq2DFYnXcLMjlZw2L0jLsk1eGoB522WXQ==",
"cpu": [
"arm64"
],
"dev": true,
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=12"
}
},
"node_modules/escalade": { "node_modules/escalade": {
"version": "3.1.2", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz",
@@ -7224,17 +6883,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/object-inspect": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
"integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/once": { "node_modules/once": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -7258,19 +6906,17 @@
} }
}, },
"node_modules/openai": { "node_modules/openai": {
"version": "4.57.0", "version": "4.56.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.57.0.tgz", "resolved": "https://registry.npmjs.org/openai/-/openai-4.56.0.tgz",
"integrity": "sha512-JnwBSIYqiZ3jYjB5f2in8hQ0PRA092c6m+/6dYB0MzK0BEbn+0dioxZsPLBm5idJbg9xzLNOiGVm2OSuhZ+BdQ==", "integrity": "sha512-zcag97+3bG890MNNa0DQD9dGmmTWL8unJdNkulZzWRXrl+QeD+YkBI4H58rJcwErxqGK6a0jVPZ4ReJjhDGcmw==",
"dependencies": { "dependencies": {
"@types/node": "^18.11.18", "@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4", "@types/node-fetch": "^2.6.4",
"@types/qs": "^6.9.7",
"abort-controller": "^3.0.0", "abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1", "agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2", "form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2", "formdata-node": "^4.3.2",
"node-fetch": "^2.6.7", "node-fetch": "^2.6.7"
"qs": "^6.10.3"
}, },
"bin": { "bin": {
"openai": "bin/cli" "openai": "bin/cli"
@@ -7722,20 +7368,6 @@
} }
] ]
}, },
"node_modules/qs": {
"version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
"integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==",
"dependencies": {
"side-channel": "^1.0.6"
},
"engines": {
"node": ">=0.6"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/queue-microtask": { "node_modules/queue-microtask": {
"version": "1.2.3", "version": "1.2.3",
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -8082,23 +7714,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/side-channel": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"dependencies": {
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/signal-exit": { "node_modules/signal-exit": {
"version": "3.0.7", "version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",

package.json

@@ -1,6 +1,6 @@
{ {
"name": "opencommit", "name": "opencommit",
"version": "3.1.2", "version": "3.1.1",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫", "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [ "keywords": [
"git", "git",
@@ -88,6 +88,7 @@
"@google/generative-ai": "^0.11.4", "@google/generative-ai": "^0.11.4",
"@octokit/webhooks-schemas": "^6.11.0", "@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0", "@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4", "axios": "^1.3.4",
"chalk": "^5.2.0", "chalk": "^5.2.0",
"cleye": "^1.3.2", "cleye": "^1.3.2",
@@ -96,6 +97,6 @@
"ignore": "^5.2.4", "ignore": "^5.2.4",
"ini": "^3.0.1", "ini": "^3.0.1",
"inquirer": "^9.1.4", "inquirer": "^9.1.4",
"openai": "^4.57.0" "openai": "^4.56.0"
} }
} }


@@ -1,5 +0,0 @@
export enum COMMANDS {
config = 'config',
hook = 'hook',
commitlint = 'commitlint'
}

src/cli.ts

@@ -9,7 +9,7 @@ import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js'; import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook'; import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion'; import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { runMigrations } from './migrations/_run.js'; import { findCommand } from './commands/find';
const extraArgs = process.argv.slice(2); const extraArgs = process.argv.slice(2);
@@ -17,7 +17,12 @@ cli(
{ {
version: packageJSON.version, version: packageJSON.version,
name: 'opencommit', name: 'opencommit',
commands: [configCommand, hookCommand, commitlintConfigCommand], commands: [
configCommand,
hookCommand,
commitlintConfigCommand,
findCommand
],
flags: { flags: {
fgm: Boolean, fgm: Boolean,
yes: { yes: {
@@ -31,7 +36,6 @@ cli(
help: { description: packageJSON.description } help: { description: packageJSON.description }
}, },
async ({ flags }) => { async ({ flags }) => {
await runMigrations();
await checkIsLatestVersion(); await checkIsLatestVersion();
if (await isHookCalled()) { if (await isHookCalled()) {


@@ -1,5 +1,6 @@
export enum COMMANDS { export enum COMMANDS {
config = 'config', config = 'config',
hook = 'hook', hook = 'hook',
commitlint = 'commitlint' commitlint = 'commitlint',
find = 'find'
} }

src/commands/commit.ts

@@ -107,16 +107,13 @@ ${chalk.grey('——————————————————')}`
const remotes = await getGitRemotes(); const remotes = await getGitRemotes();
// user isn't pushing, return early
if (config.OCO_GITPUSH === false) return;
if (!remotes.length) { if (!remotes.length) {
const { stdout } = await execa('git', ['push']); const { stdout } = await execa('git', ['push']);
if (stdout) outro(stdout); if (stdout) outro(stdout);
process.exit(0); process.exit(0);
} }
if (remotes.length === 1) { if (remotes.length === 1 && config.OCO_GITPUSH !== true) {
const isPushConfirmedByUser = await confirm({ const isPushConfirmedByUser = await confirm({
message: 'Do you want to run `git push`?' message: 'Do you want to run `git push`?'
}); });
@@ -159,13 +156,13 @@ ${chalk.grey('——————————————————')}`
const { stdout } = await execa('git', ['push', selectedRemote]); const { stdout } = await execa('git', ['push', selectedRemote]);
if (stdout) outro(stdout);
pushSpinner.stop( pushSpinner.stop(
`${chalk.green( `${chalk.green(
'✔' '✔'
)} successfully pushed all commits to ${selectedRemote}` )} Successfully pushed all commits to ${selectedRemote}`
); );
if (stdout) outro(stdout);
} }
} else { } else {
const regenerateMessage = await confirm({ const regenerateMessage = await confirm({

src/commands/commitlint.ts

@@ -16,7 +16,7 @@ export const commitlintConfigCommand = command(
parameters: ['<mode>'] parameters: ['<mode>']
}, },
async (argv) => { async (argv) => {
intro('opencommit — configure @commitlint'); intro('OpenCommit — configure @commitlint');
try { try {
const { mode } = argv._; const { mode } = argv._;

src/commands/config.ts

@@ -11,9 +11,14 @@ import { TEST_MOCK_TYPES } from '../engine/testAi';
import { getI18nLocal, i18n } from '../i18n'; import { getI18nLocal, i18n } from '../i18n';
export enum CONFIG_KEYS { export enum CONFIG_KEYS {
OCO_API_KEY = 'OCO_API_KEY', OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT', OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT', OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
OCO_DESCRIPTION = 'OCO_DESCRIPTION', OCO_DESCRIPTION = 'OCO_DESCRIPTION',
OCO_EMOJI = 'OCO_EMOJI', OCO_EMOJI = 'OCO_EMOJI',
OCO_MODEL = 'OCO_MODEL', OCO_MODEL = 'OCO_MODEL',
@@ -22,10 +27,14 @@ export enum CONFIG_KEYS {
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER', OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE', OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER', OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT', OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE', OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
OCO_API_URL = 'OCO_API_URL', OCO_API_URL = 'OCO_API_URL',
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
} }
export enum CONFIG_MODES { export enum CONFIG_MODES {
@@ -114,19 +123,65 @@ const validateConfig = (
}; };
export const configValidators = { export const configValidators = {
[CONFIG_KEYS.OCO_API_KEY](value: any, config: any = {}) { [CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'openai') return value; if (config.OCO_AI_PROVIDER !== 'openai') return value;
validateConfig( validateConfig(
'OCO_API_KEY', 'OCO_OPENAI_API_KEY',
typeof value === 'string' && value.length > 0, typeof value === 'string' && value.length > 0,
'Empty value is not allowed' 'Empty value is not allowed'
); );
validateConfig( validateConfig(
'OCO_API_KEY', 'OCO_OPENAI_API_KEY',
value, value,
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`' 'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'azure') return value;
validateConfig(
'OCO_AZURE_API_KEY',
!!value,
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_GEMINI_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'gemini') return value;
validateConfig(
'OCO_GEMINI_API_KEY',
value || config.OCO_GEMINI_API_KEY || config.OCO_AI_PROVIDER === 'test',
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
if (config.OCO_AI_PROVIDER !== 'anthropic') return value;
validateConfig(
'ANTHROPIC_API_KEY',
!!value,
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
);
return value;
},
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
validateConfig(
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
value || config.OCO_AI_PROVIDER !== 'flowise',
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
); );
return value; return value;
@@ -186,11 +241,11 @@ export const configValidators = {
return getI18nLocal(value); return getI18nLocal(value);
}, },
[CONFIG_KEYS.OCO_API_URL](value: any) { [CONFIG_KEYS.OCO_OPENAI_BASE_PATH](value: any) {
validateConfig( validateConfig(
CONFIG_KEYS.OCO_API_URL, CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
typeof value === 'string', typeof value === 'string',
`${value} is not a valid URL. It should start with 'http://' or 'https://'.` 'Must be string'
); );
return value; return value;
}, },
@@ -260,6 +315,26 @@ export const configValidators = {
return value; return value;
}, },
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
value.includes('openai.azure.com'),
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
typeof value === 'string' && value.includes(':'),
'Value must be string and should include both I.P. and port number' // Considering the possibility of DNS lookup or feeding the I.P. explicitly, there is no pattern to verify, except a column for the port number
);
return value;
},
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) { [CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
validateConfig( validateConfig(
CONFIG_KEYS.OCO_TEST_MOCK_TYPE, CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
@@ -271,11 +346,11 @@ export const configValidators = {
return value; return value;
}, },
[CONFIG_KEYS.OCO_WHY](value: any) { [CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) {
validateConfig( validateConfig(
CONFIG_KEYS.OCO_WHY, CONFIG_KEYS.OCO_OLLAMA_API_URL,
typeof value === 'boolean', typeof value === 'string' && value.startsWith('http'),
'Must be true or false' `${value} is not a valid URL. It should start with 'http://' or 'https://'.`
); );
return value; return value;
} }
@@ -292,10 +367,14 @@ export enum OCO_AI_PROVIDER_ENUM {
} }
export type ConfigType = { export type ConfigType = {
[CONFIG_KEYS.OCO_API_KEY]?: string; [CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string;
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string;
[CONFIG_KEYS.OCO_AZURE_API_KEY]?: string;
[CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string;
[CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string;
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number; [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number; [CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
[CONFIG_KEYS.OCO_API_URL]?: string; [CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean; [CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
[CONFIG_KEYS.OCO_EMOJI]: boolean; [CONFIG_KEYS.OCO_EMOJI]: boolean;
[CONFIG_KEYS.OCO_WHY]: boolean; [CONFIG_KEYS.OCO_WHY]: boolean;
@@ -306,11 +385,16 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM; [CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
[CONFIG_KEYS.OCO_GITPUSH]: boolean; [CONFIG_KEYS.OCO_GITPUSH]: boolean;
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean; [CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
[CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string;
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string; [CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
[CONFIG_KEYS.OCO_API_URL]?: string;
[CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string;
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string;
[CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string;
}; };
export const defaultConfigPath = pathJoin(homedir(), '.opencommit'); const defaultConfigPath = pathJoin(homedir(), '.opencommit');
export const defaultEnvPath = pathResolve(process.cwd(), '.env'); const defaultEnvPath = pathResolve(process.cwd(), '.env');
const assertConfigsAreValid = (config: Record<string, any>) => { const assertConfigsAreValid = (config: Record<string, any>) => {
for (const [key, value] of Object.entries(config)) { for (const [key, value] of Object.entries(config)) {
@@ -362,7 +446,7 @@ const initGlobalConfig = (configPath: string = defaultConfigPath) => {
return DEFAULT_CONFIG; return DEFAULT_CONFIG;
}; };
const parseConfigVarValue = (value?: any) => { const parseEnvVarValue = (value?: any) => {
try { try {
return JSON.parse(value); return JSON.parse(value);
} catch (error) { } catch (error) {
@@ -375,45 +459,41 @@ const getEnvConfig = (envPath: string) => {
return { return {
OCO_MODEL: process.env.OCO_MODEL, OCO_MODEL: process.env.OCO_MODEL,
OCO_API_URL: process.env.OCO_API_URL,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT), OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_TOKENS_MAX_OUTPUT: parseConfigVarValue( OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
process.env.OCO_TOKENS_MAX_OUTPUT OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
), OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
OCO_DESCRIPTION: parseConfigVarValue(process.env.OCO_DESCRIPTION), OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI), OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
OCO_LANGUAGE: process.env.OCO_LANGUAGE, OCO_LANGUAGE: process.env.OCO_LANGUAGE,
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER, process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM, OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT), OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE, OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH) // todo: deprecate OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
}; };
}; };
export const setGlobalConfig = ( const getGlobalConfig = (configPath: string) => {
config: ConfigType,
configPath: string = defaultConfigPath
) => {
writeFileSync(configPath, iniStringify(config), 'utf8');
};
export const getIsGlobalConfigFileExist = (
configPath: string = defaultConfigPath
) => {
return existsSync(configPath);
};
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
let globalConfig: ConfigType; let globalConfig: ConfigType;
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath); const isGlobalConfigFileExist = existsSync(configPath);
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath); if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
else { else {
const configFile = readFileSync(configPath, 'utf8'); const configFile = readFileSync(configPath, 'utf8');
@@ -430,18 +510,16 @@ export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
* @param fallback - global ~/.opencommit config file * @param fallback - global ~/.opencommit config file
* @returns merged config * @returns merged config
*/ */
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => { const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) =>
const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]); Object.keys(CONFIG_KEYS).reduce((acc, key) => {
return Array.from(allKeys).reduce((acc, key) => { acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
return acc; return acc;
}, {} as ConfigType); }, {} as ConfigType);
};
interface GetConfigOptions { interface GetConfigOptions {
globalPath?: string; globalPath?: string;
envPath?: string; envPath?: string;
setDefaultValues?: boolean;
} }
export const getConfig = ({ export const getConfig = ({
@@ -457,15 +535,13 @@ export const getConfig = ({
}; };
export const setConfig = ( export const setConfig = (
keyValues: [key: string, value: string | boolean | number | null][], keyValues: [key: string, value: string][],
globalConfigPath: string = defaultConfigPath globalConfigPath: string = defaultConfigPath
) => { ) => {
const config = getConfig({ const config = getConfig({
globalPath: globalConfigPath globalPath: globalConfigPath
}); });
const configToSet = {};
for (let [key, value] of keyValues) { for (let [key, value] of keyValues) {
if (!configValidators.hasOwnProperty(key)) { if (!configValidators.hasOwnProperty(key)) {
const supportedKeys = Object.keys(configValidators).join('\n'); const supportedKeys = Object.keys(configValidators).join('\n');
@@ -477,8 +553,7 @@ export const setConfig = (
let parsedConfigValue; let parsedConfigValue;
try { try {
if (typeof value === 'string') parsedConfigValue = JSON.parse(value); parsedConfigValue = JSON.parse(value);
else parsedConfigValue = value;
} catch (error) { } catch (error) {
parsedConfigValue = value; parsedConfigValue = value;
} }
@@ -488,10 +563,10 @@ export const setConfig = (
config config
); );
configToSet[key] = validValue; config[key] = validValue;
} }
setGlobalConfig(mergeConfigs(configToSet, config), globalConfigPath); writeFileSync(globalConfigPath, iniStringify(config), 'utf8');
outro(`${chalk.green('✔')} config successfully set`); outro(`${chalk.green('✔')} config successfully set`);
}; };
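The config.ts hunks above differ mainly in how credentials are keyed (a single OCO_API_KEY/OCO_API_URL pair on one side, per-provider keys such as OCO_OPENAI_API_KEY and OCO_AZURE_ENDPOINT on the other) and in how the local `.env`, the global `~/.opencommit` file, and defaults are merged. Both variants share the same precedence rule: a value found in the local `.env` wins over the global file, and primitive-looking strings are decoded with `JSON.parse`. The sketch below illustrates only that rule; `parseVarValue`, `mergeConfigs`, and the sample keys are illustrative names rather than the exact exports, and the key list is passed in as a parameter because one variant iterates over the union of keys present in either source while the other iterates over `Object.keys(CONFIG_KEYS)`.

```ts
// Minimal sketch of the merge precedence shared by both variants of config.ts.
type PartialConfig = Record<string, string | number | boolean | null | undefined>;

const parseVarValue = (value?: any) => {
  try {
    return JSON.parse(value); // "true" -> true, "4096" -> 4096
  } catch {
    return value; // plain strings (model names, URLs) pass through unchanged
  }
};

const mergeConfigs = (
  main: PartialConfig,     // values read from the local .env
  fallback: PartialConfig, // values read from the global ~/.opencommit file
  keys: string[]
) =>
  keys.reduce((acc, key) => {
    acc[key] = parseVarValue(main[key] ?? fallback[key]); // .env first, then global
    return acc;
  }, {} as PartialConfig);

// A local override wins and is parsed back to its primitive type:
const merged = mergeConfigs(
  { OCO_TOKENS_MAX_INPUT: '8192' },
  { OCO_TOKENS_MAX_INPUT: 4096, OCO_EMOJI: 'true' },
  ['OCO_TOKENS_MAX_INPUT', 'OCO_EMOJI']
);
console.log(merged); // { OCO_TOKENS_MAX_INPUT: 8192, OCO_EMOJI: true }
```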

372
src/commands/find.ts Normal file

@@ -0,0 +1,372 @@
import {
confirm,
intro,
isCancel,
note,
outro,
select,
spinner
} from '@clack/prompts';
import chalk from 'chalk';
import { command } from 'cleye';
import { execa } from 'execa';
import { getIgnoredFolders } from '../utils/git';
import { COMMANDS } from './ENUMS';
import { OpenAiEngine } from '../engine/openAi';
import { getConfig } from './config';
type Occurrence = {
fileName: string;
context: {
number: number;
content: string;
};
matches: {
number: number;
content: string;
}[];
};
/*
TODO:
- [ ] format declarations as file:line => context -> declaration
- [ ] format usages as file:line => context -> usage
- [ ] expand on usage to see it's call hierarchy
- [ ] generate Mermaid diagram
*/
const generateMermaid = async (stdout: string) => {
const config = getConfig();
const DEFAULT_CONFIG = {
model: config.OCO_MODEL!,
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
baseURL: config.OCO_OPENAI_BASE_PATH!
};
const engine = new OpenAiEngine({
...DEFAULT_CONFIG,
apiKey: config.OCO_OPENAI_API_KEY!
});
const diagram = await engine.generateCommitMessage([
{
role: 'system',
content: `You are to generate a mermaid diagram from the given function. Strictly answer in this json format: { "mermaid": "<mermaid diagram>" }. Where <mermaid diagram> is a valid mermaid diagram, e.g:
graph TD
A[Start] --> B[Generate Commit Message]
B --> C{Token count >= Max?}
C -->|Yes| D[Process file diffs]
C -->|No| E[Generate single message]
D --> F[Join messages]
E --> G[Generate message]
F --> H[End]
G --> H
B --> I{Error occurred?}
I -->|Yes| J[Handle error]
J --> H
I -->|No| H
`
},
{
role: 'user',
content: stdout
}
]);
return JSON.parse(diagram as string);
};
export function extractFuncName(line: string) {
const regex =
/(?:function|export\s+const|const|let|var)?\s*(?:async\s+)?(\w+)\s*(?:=\s*(?:async\s*)?\(|\()/;
const match = line.match(regex);
return match ? match[1] : null;
}
function extractSingle(lineContent: string): string | null {
const match = lineContent.match(/\s*(?:public\s+)?(?:async\s+)?(\w+)\s*=/);
return match ? match[1] : null;
}
function mapLinesToOccurrences(input: string[], step: number = 3) {
const occurrences: Occurrence[] = [];
let single;
for (let i = 0; i < input.length; i += step) {
if (i + 1 >= input.length) break;
const [fileName, callerLineNumber, ...callerLineContent] =
input[i].split(/[=:]/);
const [, definitionLineNumber, ...definitionLineContent] =
input[i + 1].split(/[:]/);
if (!single) single = extractSingle(definitionLineContent.join(':'));
occurrences.push({
fileName,
context: {
number: parseInt(callerLineNumber, 10),
content: callerLineContent.join('=').trim()
},
matches: [
{
number: parseInt(definitionLineNumber, 10),
content: definitionLineContent.join(':').trim()
}
]
});
}
return { occurrences, single };
}
const findDeclarations = async (query: string[], ignoredFolders: string[]) => {
const searchQuery = `(async|function|public).*${query.join('[^ \\n]*')}`;
outro(`Searching: ${searchQuery}`);
const occurrences = await findInFiles({ query: searchQuery, ignoredFolders });
if (!occurrences) return null;
const declarations = mapLinesToOccurrences(occurrences.split('\n'));
return declarations;
};
const findUsagesByDeclaration = async (
declaration: string,
ignoredFolders: string[]
) => {
const searchQuery = `${declaration}\\(.*\\)`;
const occurrences = await findInFiles({
query: searchQuery,
ignoredFolders
// grepOptions: ['--function-context']
});
if (!occurrences) return null;
const usages = mapLinesToOccurrences(
occurrences.split('\n').filter(Boolean),
2
);
return usages;
};
const buildCallHierarchy = async (
query: string[],
ignoredFolders: string[]
) => {};
const findInFiles = async ({
query,
ignoredFolders,
grepOptions = []
}: {
query: string;
ignoredFolders: string[];
grepOptions?: string[];
}): Promise<string | null> => {
const withIgnoredFolders =
ignoredFolders.length > 0
? [
'--',
' ',
'.',
' ',
ignoredFolders.map((folder) => `:^${folder}`).join(' ')
]
: [];
const params = [
'--no-pager',
'grep',
'--show-function', // show function caller
'-n',
'-i',
...grepOptions,
'--break',
'--color=never',
// '-C',
// '1',
// '--full-name',
// '--heading',
'--threads',
'10',
'-E',
query,
...withIgnoredFolders
];
try {
const { stdout } = await execa('git', params);
return stdout;
} catch (error) {
return null;
}
};
const generatePermutations = (arr: string[]): string[][] => {
const n = arr.length;
const result: string[][] = [];
const indices = new Int32Array(n);
const current = new Array(n);
for (let i = 0; i < n; i++) {
indices[i] = i;
current[i] = arr[i];
}
result.push([...current]);
let i = 1;
while (i < n) {
if (indices[i] > 0) {
const j = indices[i] % 2 === 1 ? 0 : indices[i];
[current[i], current[j]] = [current[j], current[i]];
result.push([...current]);
indices[i]--;
i = 1;
} else {
indices[i] = i;
i++;
}
}
return result;
};
const shuffleQuery = (query: string[]): string[][] => {
return generatePermutations(query);
};
export const findCommand = command(
{
name: COMMANDS.find,
parameters: ['<query...>']
},
async (argv) => {
const query = argv._;
intro(`OpenCommit — 🔦 find`);
const ignoredFolders = getIgnoredFolders();
const searchSpinner = spinner();
let declarations = await findDeclarations(query, ignoredFolders);
outro(`No matches found. Searching semantically similar queries.`);
searchSpinner.start(`Searching for matches...`);
if (!declarations?.occurrences.length) {
const allPossibleQueries = shuffleQuery(query).reverse();
for (const possibleQuery of allPossibleQueries) {
declarations = await findDeclarations(possibleQuery, ignoredFolders);
if (declarations?.occurrences.length) break;
}
}
if (!declarations?.occurrences.length) {
searchSpinner.stop(`${chalk.red('✘')} No function declarations found.`);
return process.exit(1);
}
const usages = await findUsagesByDeclaration(
declarations.single,
ignoredFolders
);
searchSpinner.stop(
`${chalk.green('✔')} Found ${chalk.green(
declarations.single
)} definition and ${usages?.occurrences.length} usages.`
);
note(
declarations.occurrences
.map((o) =>
o.matches
.map(
(m) =>
`${o.fileName}:${m.number} ${chalk.cyan(
'==>'
)} ${m.content.replace(
declarations.single,
chalk.green(declarations.single)
)}`
)
.join('\n')
)
.join('\n'),
'⍜ DECLARATIONS ⍜'
);
note(
usages?.occurrences
.map((o) =>
o.matches.map(
(m) =>
`${o.fileName}:${m.number} ${chalk.cyan(
'==>'
)} ${m.content.replace(
declarations.single,
chalk.green(declarations.single)
)}`
)
)
.join('\n'),
'⌾ USAGES ⌾'
);
const usage = (await select({
message: chalk.cyan('Expand usage:'),
options: usages!.occurrences
.map((o) =>
o.matches.map((m) => ({
value: { o, m },
label: `${chalk.yellow(`${o.fileName}:${m.number}`)} ${chalk.cyan(
'==>'
)} ${m.content.replace(
declarations.single,
chalk.green(declarations.single)
)}`,
hint: `parent: ${extractFuncName(o.context.content) ?? '404'}`
}))
)
.flat()
})) as { o: Occurrence; m: any };
if (isCancel(usage)) process.exit(1);
const { stdout } = await execa('git', [
'--no-pager',
'grep',
'--function-context',
'--heading',
'-E',
usage.m.content.replace('(', '\\(').replace(')', '\\)'),
usage.o.fileName
]);
const mermaidSpinner = spinner();
mermaidSpinner.start('Generating mermaid diagram...');
const mermaid: any = await generateMermaid(stdout);
mermaidSpinner.stop();
if (mermaid) console.log(mermaid.mermaid);
else note('No mermaid diagram found.');
const isCommitConfirmedByUser = await confirm({
message: 'Create Excalidraw file?'
});
if (isCommitConfirmedByUser) outro('created diagram.excalidraw');
else outro('Excalidraw file not created.');
}
);
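The parsing in find.ts assumes the shape of `git grep -n --show-function` output, where the enclosing-function line is delimited with '=' (`file=line=content`) and the match itself with ':' (`file:line:content`); that is why `mapLinesToOccurrences` splits the first line of each pair on `/[=:]/` and the second on `/[:]/`. The sketch below walks one such pair through that parsing and through `extractFuncName`; the file path and grep lines are invented for illustration, and the helpers are simplified copies rather than imports.

```ts
// Hedged sketch: one context/match pair from a hand-written sample of
// `git grep -n --show-function` output, parsed the same way find.ts does.
type Occurrence = {
  fileName: string;
  context: { number: number; content: string };
  matches: { number: number; content: string }[];
};

const extractFuncName = (line: string) => {
  const regex =
    /(?:function|export\s+const|const|let|var)?\s*(?:async\s+)?(\w+)\s*(?:=\s*(?:async\s*)?\(|\()/;
  const match = line.match(regex);
  return match ? match[1] : null;
};

const grepLines = [
  // enclosing function, '=' separators
  'src/commands/commit.ts=42=export const commitChanges = async (diff: string) => {',
  // the actual match, ':' separators
  'src/commands/commit.ts:57:  const message = await generateCommitMessage(diff);'
];

const [fileName, contextLine, ...contextContent] = grepLines[0].split(/[=:]/);
const [, matchLine, ...matchContent] = grepLines[1].split(/[:]/);

const occurrence: Occurrence = {
  fileName,
  context: {
    number: parseInt(contextLine, 10),
    content: contextContent.join('=').trim()
  },
  matches: [
    { number: parseInt(matchLine, 10), content: matchContent.join(':').trim() }
  ]
};

console.log(occurrence.fileName);                         // src/commands/commit.ts
console.log(occurrence.context.number);                   // 42
console.log(occurrence.matches[0].number);                // 57
console.log(extractFuncName(occurrence.context.content)); // commitChanges
```

One side effect worth noting when reading the printed declarations: because the context line is split on both '=' and ':' but rejoined with '=', any ':' inside the context (a TypeScript type annotation, for example) comes back as '=' in the displayed content.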

src/commands/prepare-commit-msg-hook.ts

@@ -35,15 +35,18 @@ export const prepareCommitMessageHook = async (
if (!staged) return; if (!staged) return;
intro('opencommit'); intro('OpenCommit');
const config = getConfig(); const config = getConfig();
if (!config.OCO_API_KEY) { if (
outro( !config.OCO_OPENAI_API_KEY &&
'No OCO_API_KEY is set. Set your key via `oco config set OCO_API_KEY=<value>. For more info see https://github.com/di-sukharev/opencommit' !config.OCO_ANTHROPIC_API_KEY &&
!config.OCO_AZURE_API_KEY
) {
throw new Error(
'No OPEN_AI_API or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key in ~/.opencommit'
); );
return;
} }
const spin = spinner(); const spin = spinner();

src/engine/azure.ts

@@ -27,9 +27,9 @@ export class AzureEngine implements AiEngine {
   );
 }
-generateCommitMessage = async (
+async generateCommitMessage(
   messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
-): Promise<string | undefined> => {
+): Promise<string | undefined> {
   try {
     const REQUEST_TOKENS = messages
       .map((msg) => tokenCount(msg.content as string) + 4)
@@ -73,5 +73,5 @@ export class AzureEngine implements AiEngine {
       throw err;
     }
-  };
+  }
 }

View File

@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
 interface FlowiseAiConfig extends AiEngineConfig {}
-export class FlowiseEngine implements AiEngine {
+export class FlowiseAi implements AiEngine {
   config: FlowiseAiConfig;
   client: AxiosInstance;

View File

@@ -11,7 +11,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
 interface GeminiConfig extends AiEngineConfig {}
-export class GeminiEngine implements AiEngine {
+export class Gemini implements AiEngine {
   config: GeminiConfig;
   client: GoogleGenerativeAI;

View File

@@ -4,7 +4,7 @@ import { AiEngine, AiEngineConfig } from './Engine';
 interface OllamaConfig extends AiEngineConfig {}
-export class OllamaEngine implements AiEngine {
+export class OllamaAi implements AiEngine {
   config: OllamaConfig;
   client: AxiosInstance;
@@ -28,10 +28,7 @@ export class OllamaEngine implements AiEngine {
       stream: false
     };
     try {
-      const response = await this.client.post(
-        this.client.getUri(this.config),
-        params
-      );
+      const response = await this.client.post('', params);
       const message = response.data.message;

View File

@@ -6,8 +6,11 @@ import { mergeDiffs } from './utils/mergeDiffs';
 import { tokenCount } from './utils/tokenCount';
 const config = getConfig();
-const MAX_TOKENS_INPUT = config.OCO_TOKENS_MAX_INPUT;
-const MAX_TOKENS_OUTPUT = config.OCO_TOKENS_MAX_OUTPUT;
+const MAX_TOKENS_INPUT =
+  config.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
+const MAX_TOKENS_OUTPUT =
+  config.OCO_TOKENS_MAX_OUTPUT ||
+  DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
 const generateCommitMessageChatCompletionPrompt = async (
   diff: string,

View File

@@ -1,45 +0,0 @@
import {
CONFIG_KEYS,
getConfig,
OCO_AI_PROVIDER_ENUM,
setConfig
} from '../commands/config';
export default function () {
const config = getConfig({ setDefaultValues: false });
const aiProvider = config.OCO_AI_PROVIDER;
let apiKey: string | undefined;
let apiUrl: string | undefined;
if (aiProvider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
apiKey = config['OCO_OLLAMA_API_KEY'];
apiUrl = config['OCO_OLLAMA_API_URL'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.ANTHROPIC) {
apiKey = config['OCO_ANTHROPIC_API_KEY'];
apiUrl = config['OCO_ANTHROPIC_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.OPENAI) {
apiKey = config['OCO_OPENAI_API_KEY'];
apiUrl = config['OCO_OPENAI_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.AZURE) {
apiKey = config['OCO_AZURE_API_KEY'];
apiUrl = config['OCO_AZURE_ENDPOINT'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.GEMINI) {
apiKey = config['OCO_GEMINI_API_KEY'];
apiUrl = config['OCO_GEMINI_BASE_PATH'];
} else if (aiProvider === OCO_AI_PROVIDER_ENUM.FLOWISE) {
apiKey = config['OCO_FLOWISE_API_KEY'];
apiUrl = config['OCO_FLOWISE_ENDPOINT'];
} else {
throw new Error(
`Migration failed, set AI provider first. Run "oco config set OCO_AI_PROVIDER=<provider>", where <provider> is one of: ${Object.values(
OCO_AI_PROVIDER_ENUM
).join(', ')}`
);
}
if (apiKey) setConfig([[CONFIG_KEYS.OCO_API_KEY, apiKey]]);
if (apiUrl) setConfig([[CONFIG_KEYS.OCO_API_URL, apiUrl]]);
}

View File

@@ -1,26 +0,0 @@
import { getGlobalConfig, setGlobalConfig } from '../commands/config';
export default function () {
const obsoleteKeys = [
'OCO_OLLAMA_API_KEY',
'OCO_OLLAMA_API_URL',
'OCO_ANTHROPIC_API_KEY',
'OCO_ANTHROPIC_BASE_PATH',
'OCO_OPENAI_API_KEY',
'OCO_OPENAI_BASE_PATH',
'OCO_AZURE_API_KEY',
'OCO_AZURE_ENDPOINT',
'OCO_GEMINI_API_KEY',
'OCO_GEMINI_BASE_PATH',
'OCO_FLOWISE_API_KEY',
'OCO_FLOWISE_ENDPOINT'
];
const globalConfig = getGlobalConfig();
const configToOverride = { ...globalConfig };
for (const key of obsoleteKeys) delete configToOverride[key];
setGlobalConfig(configToOverride);
}

View File

@@ -1,20 +0,0 @@
import {
ConfigType,
DEFAULT_CONFIG,
getGlobalConfig,
setConfig
} from '../commands/config';
export default function () {
const setDefaultConfigValues = (config: ConfigType) => {
const entriesToSet: [key: string, value: string | boolean | number][] = [];
for (const entry of Object.entries(DEFAULT_CONFIG)) {
const [key, _value] = entry;
if (config[key] === 'undefined') entriesToSet.push(entry);
}
if (entriesToSet.length > 0) setConfig(entriesToSet);
};
setDefaultConfigValues(getGlobalConfig());
}

View File

@@ -1,18 +0,0 @@
import migration00 from './00_use_single_api_key_and_url';
import migration01 from './01_remove_obsolete_config_keys_from_global_file';
import migration02 from './02_set_missing_default_values';
export const migrations = [
{
name: '00_use_single_api_key_and_url',
run: migration00
},
{
name: '01_remove_obsolete_config_keys_from_global_file',
run: migration01
},
{
name: '02_set_missing_default_values',
run: migration02
}
];

View File

@@ -1,70 +0,0 @@
import fs from 'fs';
import { homedir } from 'os';
import { join as pathJoin } from 'path';
import { migrations } from './_migrations';
import { outro } from '@clack/prompts';
import chalk from 'chalk';
import {
getConfig,
getIsGlobalConfigFileExist,
OCO_AI_PROVIDER_ENUM
} from '../commands/config';
const migrationsFile = pathJoin(homedir(), '.opencommit_migrations');
const getCompletedMigrations = (): string[] => {
if (!fs.existsSync(migrationsFile)) {
return [];
}
const data = fs.readFileSync(migrationsFile, 'utf-8');
return data ? JSON.parse(data) : [];
};
const saveCompletedMigration = (migrationName: string) => {
const completedMigrations = getCompletedMigrations();
completedMigrations.push(migrationName);
fs.writeFileSync(
migrationsFile,
JSON.stringify(completedMigrations, null, 2)
);
};
export const runMigrations = async () => {
// if no config file, we assume it's a new installation and no migrations are needed
if (!getIsGlobalConfigFileExist()) return;
const config = getConfig();
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
const completedMigrations = getCompletedMigrations();
let isMigrated = false;
for (const migration of migrations) {
if (!completedMigrations.includes(migration.name)) {
try {
console.log('Applying migration', migration.name);
migration.run();
console.log('Migration applied successfully', migration.name);
saveCompletedMigration(migration.name);
} catch (error) {
outro(
`${chalk.red('Failed to apply migration')} ${
migration.name
}: ${error}`
);
}
isMigrated = true;
}
}
if (isMigrated) {
outro(
`${chalk.green(
'✔'
)} Migrations to your config were applied successfully. Please rerun.`
);
process.exit(0);
}
};
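For reference, saveCompletedMigration above serializes the list of applied migration names with JSON.stringify(..., null, 2), so once the three migrations registered in _migrations.ts have run, parsing ~/.opencommit_migrations yields roughly the following. This is a sketch of the expected shape, not an exact dump.

// Roughly what JSON.parse(readFileSync(migrationsFile, 'utf-8')) returns after all migrations ran.
const completedMigrations: string[] = [
  '00_use_single_api_key_and_url',
  '01_remove_obsolete_config_keys_from_global_file',
  '02_set_missing_default_values'
];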

View File

@@ -2,9 +2,9 @@ import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';
 import { AnthropicEngine } from '../engine/anthropic';
 import { AzureEngine } from '../engine/azure';
 import { AiEngine } from '../engine/Engine';
-import { FlowiseEngine } from '../engine/flowise';
-import { GeminiEngine } from '../engine/gemini';
-import { OllamaEngine } from '../engine/ollama';
+import { FlowiseAi } from '../engine/flowise';
+import { Gemini } from '../engine/gemini';
+import { OllamaAi } from '../engine/ollama';
 import { OpenAiEngine } from '../engine/openAi';
 import { TestAi, TestMockType } from '../engine/testAi';
@@ -16,30 +16,50 @@ export function getEngine(): AiEngine {
     model: config.OCO_MODEL!,
     maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
     maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
-    baseURL: config.OCO_API_URL!,
-    apiKey: config.OCO_API_KEY!
+    baseURL: config.OCO_OPENAI_BASE_PATH!
   };
   switch (provider) {
     case OCO_AI_PROVIDER_ENUM.OLLAMA:
-      return new OllamaEngine(DEFAULT_CONFIG);
+      return new OllamaAi({
+        ...DEFAULT_CONFIG,
+        apiKey: '',
+        baseURL: config.OCO_OLLAMA_API_URL!
+      });
     case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
-      return new AnthropicEngine(DEFAULT_CONFIG);
+      return new AnthropicEngine({
+        ...DEFAULT_CONFIG,
+        apiKey: config.OCO_ANTHROPIC_API_KEY!
+      });
     case OCO_AI_PROVIDER_ENUM.TEST:
       return new TestAi(config.OCO_TEST_MOCK_TYPE as TestMockType);
     case OCO_AI_PROVIDER_ENUM.GEMINI:
-      return new GeminiEngine(DEFAULT_CONFIG);
+      return new Gemini({
+        ...DEFAULT_CONFIG,
+        apiKey: config.OCO_GEMINI_API_KEY!,
+        baseURL: config.OCO_GEMINI_BASE_PATH!
+      });
     case OCO_AI_PROVIDER_ENUM.AZURE:
-      return new AzureEngine(DEFAULT_CONFIG);
+      return new AzureEngine({
+        ...DEFAULT_CONFIG,
+        apiKey: config.OCO_AZURE_API_KEY!
+      });
     case OCO_AI_PROVIDER_ENUM.FLOWISE:
-      return new FlowiseEngine(DEFAULT_CONFIG);
+      return new FlowiseAi({
+        ...DEFAULT_CONFIG,
+        baseURL: config.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
+        apiKey: config.OCO_FLOWISE_API_KEY!
+      });
     default:
-      return new OpenAiEngine(DEFAULT_CONFIG);
+      return new OpenAiEngine({
+        ...DEFAULT_CONFIG,
+        apiKey: config.OCO_OPENAI_API_KEY!
+      });
   }
 }
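A hedged usage sketch of the factory above: getEngine() resolves the provider from config and every engine implements generateCommitMessage over OpenAI-style chat messages (see the AzureEngine hunk earlier). The import path and prompt text here are assumptions for illustration only.

import { getEngine } from './utils/engine'; // path is an assumption

// Sketch assumes an async/ESM context. Pick whichever engine
// OCO_AI_PROVIDER points at and ask it for a commit message.
const engine = getEngine();
const commitMessage = await engine.generateCommitMessage([
  { role: 'system', content: 'You write conventional commit messages.' },
  { role: 'user', content: 'diff --git a/index.ts b/index.ts ...' } // placeholder diff
]);
console.log(commitMessage ?? 'no message generated');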

View File

@@ -16,13 +16,18 @@ export const assertGitRepo = async () => {
 // (file) => `:(exclude)${file}`
 // );
+export const getIgnoredFolders = (): string[] => {
+  try {
+    return readFileSync('.opencommitignore').toString().split('\n');
+  } catch (e) {
+    return [];
+  }
+};
 export const getOpenCommitIgnore = (): Ignore => {
   const ig = ignore();
-  try {
-    ig.add(readFileSync('.opencommitignore').toString().split('\n'));
-  } catch (e) {}
+  const ignorePatterns = getIgnoredFolders();
+  ig.add(ignorePatterns);
   return ig;
 };
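A small sketch of how the refactored helpers above fit together, assuming the ignore npm package that getOpenCommitIgnore already wraps; the import path and sample file path below are made up for illustration.

import { getIgnoredFolders, getOpenCommitIgnore } from './utils/git'; // path is an assumption

// .opencommitignore is read line by line; a missing file yields [].
const patterns: string[] = getIgnoredFolders();

// The Ignore instance answers per-path questions for those patterns.
const ig = getOpenCommitIgnore();
if (ig.ignores('node_modules/some-package/index.js')) {
  // this path would be excluded from the diff that gets processed
}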

View File

@@ -1,205 +0,0 @@
import path from 'path';
import 'cli-testing-library/extend-expect';
import { exec } from 'child_process';
import { prepareTempDir } from './utils';
import { promisify } from 'util';
import { render } from 'cli-testing-library';
import { resolve } from 'path';
import { rm } from 'fs';
const fsExec = promisify(exec);
const fsRemove = promisify(rm);
/**
* git remote -v
*
* [no remotes]
*/
const prepareNoRemoteGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
/**
* git remote -v
*
* origin /tmp/remote.git (fetch)
* origin /tmp/remote.git (push)
*/
const prepareOneRemoteGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
/**
* git remote -v
*
* origin /tmp/remote.git (fetch)
* origin /tmp/remote.git (push)
* other ../remote2.git (fetch)
* other ../remote2.git (push)
*/
const prepareTwoRemotesGitRepository = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await prepareTempDir();
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git init --bare other.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
await fsExec('git remote add other ../other.git', { cwd: gitDir });
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
gitDir,
cleanup
};
};
describe('cli flow to push git branch', () => {
it('do nothing when OCO_GITPUSH is set to false', async () => {
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' OCO_GITPUSH='false' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await queryByText('Choose a remote to push to')
).not.toBeInTheConsole();
expect(
await queryByText('Do you want to run `git push`?')
).not.toBeInTheConsole();
expect(
await queryByText('Successfully pushed all commits to origin')
).not.toBeInTheConsole();
expect(
await queryByText('Command failed with exit code 1')
).not.toBeInTheConsole();
await cleanup();
});
it('push and cause error when there is no remote', async () => {
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await queryByText('Choose a remote to push to')
).not.toBeInTheConsole();
expect(
await queryByText('Do you want to run `git push`?')
).not.toBeInTheConsole();
expect(
await queryByText('Successfully pushed all commits to origin')
).not.toBeInTheConsole();
expect(
await findByText('Command failed with exit code 1')
).toBeInTheConsole();
await cleanup();
});
it('push when one remote is set', async () => {
const { gitDir, cleanup } = await prepareOneRemoteGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Do you want to run `git push`?')
).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Successfully pushed all commits to origin')
).toBeInTheConsole();
await cleanup();
});
it('push when two remotes are set', async () => {
const { gitDir, cleanup } = await prepareTwoRemotesGitRepository();
await render('echo', [`'console.log("Hello World");' > index.ts`], {
cwd: gitDir
});
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(
await findByText('Successfully pushed all commits to origin')
).toBeInTheConsole();
await cleanup();
});
});

View File

@@ -17,7 +17,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
 expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
 userEvent.keyboard('[Enter]');
-expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
+expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
 userEvent.keyboard('[Enter]');
 expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
@@ -46,7 +46,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async
 expect(await findByText('Successfully committed')).toBeInTheConsole();
-expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
+expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
 userEvent.keyboard('[Enter]');
 expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();

View File

@@ -209,7 +209,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
 oco.userEvent.keyboard('[Enter]');
 expect(
-  await oco.findByText('Do you want to run `git push`?')
+  await oco.findByText('Choose a remote to push to')
 ).toBeInTheConsole();
 oco.userEvent.keyboard('[Enter]');

View File

@@ -15,7 +15,7 @@ export const prepareEnvironment = async (): Promise<{
   gitDir: string;
   cleanup: () => Promise<void>;
 }> => {
-  const tempDir = await prepareTempDir();
+  const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
   // Create a remote git repository int the temp directory. This is necessary to execute the `git push` command
   await fsExec('git init --bare remote.git', { cwd: tempDir });
   await fsExec('git clone remote.git test', { cwd: tempDir });
@@ -30,8 +30,4 @@ export const prepareEnvironment = async (): Promise<{
   }
 }
-export const prepareTempDir = async(): Promise<string> => {
-  return await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
-}
 export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

View File

@@ -1,6 +1,5 @@
 import { existsSync, readFileSync, rmSync } from 'fs';
 import {
-  CONFIG_KEYS,
   DEFAULT_CONFIG,
   getConfig,
   setConfig
@@ -51,13 +50,14 @@ describe('config', () => {
 describe('getConfig', () => {
   it('should prioritize local .env over global .opencommit config', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
-      OCO_API_KEY: 'global-key',
+      OCO_OPENAI_API_KEY: 'global-key',
       OCO_MODEL: 'gpt-3.5-turbo',
       OCO_LANGUAGE: 'en'
     });
     envConfigFile = await generateConfig('.env', {
-      OCO_API_KEY: 'local-key',
+      OCO_OPENAI_API_KEY: 'local-key',
+      OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
       OCO_LANGUAGE: 'fr'
     });
@@ -67,21 +67,22 @@ describe('config', () => {
     });
     expect(config).not.toEqual(null);
-    expect(config.OCO_API_KEY).toEqual('local-key');
+    expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
     expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
     expect(config.OCO_LANGUAGE).toEqual('fr');
+    expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
   });
   it('should fallback to global config when local config is not set', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
-      OCO_API_KEY: 'global-key',
+      OCO_OPENAI_API_KEY: 'global-key',
       OCO_MODEL: 'gpt-4',
       OCO_LANGUAGE: 'de',
       OCO_DESCRIPTION: 'true'
     });
     envConfigFile = await generateConfig('.env', {
-      OCO_API_URL: 'local-api-url'
+      OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
     });
     const config = getConfig({
@@ -90,8 +91,8 @@ describe('config', () => {
     });
     expect(config).not.toEqual(null);
-    expect(config.OCO_API_KEY).toEqual('global-key');
-    expect(config.OCO_API_URL).toEqual('local-api-url');
+    expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
+    expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
     expect(config.OCO_MODEL).toEqual('gpt-4');
     expect(config.OCO_LANGUAGE).toEqual('de');
     expect(config.OCO_DESCRIPTION).toEqual(true);
@@ -123,7 +124,7 @@ describe('config', () => {
   it('should handle empty local config correctly', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
-      OCO_API_KEY: 'global-key',
+      OCO_OPENAI_API_KEY: 'global-key',
       OCO_MODEL: 'gpt-4',
       OCO_LANGUAGE: 'es'
     });
@@ -136,20 +137,20 @@ describe('config', () => {
     });
     expect(config).not.toEqual(null);
-    expect(config.OCO_API_KEY).toEqual('global-key');
+    expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
     expect(config.OCO_MODEL).toEqual('gpt-4');
     expect(config.OCO_LANGUAGE).toEqual('es');
   });
   it('should override global config with null values in local .env', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
-      OCO_API_KEY: 'global-key',
+      OCO_OPENAI_API_KEY: 'global-key',
       OCO_MODEL: 'gpt-4',
       OCO_LANGUAGE: 'es'
     });
     envConfigFile = await generateConfig('.env', {
-      OCO_API_KEY: 'null'
+      OCO_OPENAI_API_KEY: 'null'
     });
     const config = getConfig({
@@ -158,7 +159,7 @@ describe('config', () => {
     });
     expect(config).not.toEqual(null);
-    expect(config.OCO_API_KEY).toEqual(null);
+    expect(config.OCO_OPENAI_API_KEY).toEqual(null);
   });
   it('should handle empty global config', async () => {
@@ -171,7 +172,7 @@ describe('config', () => {
     });
     expect(config).not.toEqual(null);
-    expect(config.OCO_API_KEY).toEqual(undefined);
+    expect(config.OCO_OPENAI_API_KEY).toEqual(undefined);
   });
 });
@@ -187,12 +188,12 @@ describe('config', () => {
     expect(isGlobalConfigFileExist).toBe(false);
     await setConfig(
-      [[CONFIG_KEYS.OCO_API_KEY, 'persisted-key_1']],
+      [['OCO_OPENAI_API_KEY', 'persisted-key_1']],
       globalConfigFile.filePath
     );
     const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
-    expect(fileContent).toContain('OCO_API_KEY=persisted-key_1');
+    expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key_1');
     Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
       expect(fileContent).toContain(`${key}=${value}`);
     });
@@ -202,48 +203,42 @@ describe('config', () => {
     globalConfigFile = await generateConfig('.opencommit', {});
     await setConfig(
       [
-        [CONFIG_KEYS.OCO_API_KEY, 'new-key'],
-        [CONFIG_KEYS.OCO_MODEL, 'gpt-4']
+        ['OCO_OPENAI_API_KEY', 'new-key'],
+        ['OCO_MODEL', 'gpt-4']
       ],
       globalConfigFile.filePath
     );
-    const config = getConfig({
-      globalPath: globalConfigFile.filePath
-    });
-    expect(config.OCO_API_KEY).toEqual('new-key');
+    const config = getConfig({ globalPath: globalConfigFile.filePath });
+    expect(config.OCO_OPENAI_API_KEY).toEqual('new-key');
     expect(config.OCO_MODEL).toEqual('gpt-4');
   });
   it('should update existing config values', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
-      OCO_API_KEY: 'initial-key'
+      OCO_OPENAI_API_KEY: 'initial-key'
     });
     await setConfig(
-      [[CONFIG_KEYS.OCO_API_KEY, 'updated-key']],
+      [['OCO_OPENAI_API_KEY', 'updated-key']],
       globalConfigFile.filePath
     );
-    const config = getConfig({
-      globalPath: globalConfigFile.filePath
-    });
-    expect(config.OCO_API_KEY).toEqual('updated-key');
+    const config = getConfig({ globalPath: globalConfigFile.filePath });
+    expect(config.OCO_OPENAI_API_KEY).toEqual('updated-key');
   });
   it('should handle boolean and numeric values correctly', async () => {
     globalConfigFile = await generateConfig('.opencommit', {});
     await setConfig(
       [
-        [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, '8192'],
-        [CONFIG_KEYS.OCO_DESCRIPTION, 'true'],
-        [CONFIG_KEYS.OCO_ONE_LINE_COMMIT, 'false']
+        ['OCO_TOKENS_MAX_INPUT', '8192'],
+        ['OCO_DESCRIPTION', 'true'],
+        ['OCO_ONE_LINE_COMMIT', 'false']
       ],
       globalConfigFile.filePath
     );
-    const config = getConfig({
-      globalPath: globalConfigFile.filePath
-    });
+    const config = getConfig({ globalPath: globalConfigFile.filePath });
     expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
     expect(config.OCO_DESCRIPTION).toEqual(true);
     expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
@@ -271,12 +266,12 @@ describe('config', () => {
     expect(isGlobalConfigFileExist).toBe(false);
     await setConfig(
-      [[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
+      [['OCO_OPENAI_API_KEY', 'persisted-key']],
       globalConfigFile.filePath
     );
     const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
-    expect(fileContent).toContain('OCO_API_KEY=persisted-key');
+    expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key');
   });
   it('should set multiple configs in a row and keep the changes', async () => {
@@ -284,17 +279,14 @@ describe('config', () => {
     expect(isGlobalConfigFileExist).toBe(false);
     await setConfig(
-      [[CONFIG_KEYS.OCO_API_KEY, 'persisted-key']],
+      [['OCO_OPENAI_API_KEY', 'persisted-key']],
       globalConfigFile.filePath
     );
     const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
-    expect(fileContent1).toContain('OCO_API_KEY=persisted-key');
+    expect(fileContent1).toContain('OCO_OPENAI_API_KEY=persisted-key');
-    await setConfig(
-      [[CONFIG_KEYS.OCO_MODEL, 'gpt-4']],
-      globalConfigFile.filePath
-    );
+    await setConfig([['OCO_MODEL', 'gpt-4']], globalConfigFile.filePath);
     const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
     expect(fileContent2).toContain('OCO_MODEL=gpt-4');

View File

@@ -1,4 +1,4 @@
-import { GeminiEngine } from '../../src/engine/gemini';
+import { Gemini } from '../../src/engine/gemini';
 import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
 import {
@@ -9,7 +9,7 @@ import {
 import { OpenAI } from 'openai';
 describe('Gemini', () => {
-  let gemini: GeminiEngine;
+  let gemini: Gemini;
   let mockConfig: ConfigType;
   let mockGoogleGenerativeAi: GoogleGenerativeAI;
   let mockGenerativeModel: GenerativeModel;
@@ -20,8 +20,8 @@ describe('Gemini', () => {
   const mockGemini = () => {
     mockConfig = getConfig() as ConfigType;
-    gemini = new GeminiEngine({
-      apiKey: mockConfig.OCO_API_KEY,
+    gemini = new Gemini({
+      apiKey: mockConfig.OCO_GEMINI_API_KEY,
       model: mockConfig.OCO_MODEL
     });
   };
@@ -45,10 +45,12 @@ describe('Gemini', () => {
     mockConfig = getConfig() as ConfigType;
     mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
-    mockConfig.OCO_API_KEY = 'mock-api-key';
+    mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key';
     mockConfig.OCO_MODEL = 'gemini-1.5-flash';
-    mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_API_KEY);
+    mockGoogleGenerativeAi = new GoogleGenerativeAI(
+      mockConfig.OCO_GEMINI_API_KEY
+    );
     mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
       model: mockConfig.OCO_MODEL
     });