Mirror of https://github.com/di-sukharev/opencommit.git, synced 2026-01-12 23:28:16 -05:00
Compare commits: v3.0.0...378_fix_ho (159 commits)
Commits (SHA1):
1eaf5e2ad7, 47eb59d665, 69b3c00a52, 6f4afbfb52, 796de7b07e, 9ad281a4ee, 1ce357b023, 45dd07d229, fa164377e4, 0b89767de0,
2dded4caa4, 670f74ebc7, 31fdd8473b, 89d2aa603b, 8702c17758, 60597d23eb, 6f04927369, 0c0cf9c627, 8fe8e614ac, 68c9ed359c,
1b29f3a9fd, 596dcd7cea, eb3be62a4f, 58ad1880ef, 5ee17235cb, f9c7316eb3, dfe2730a45, a979ba091a, 6bbe07a9a1, 0156bb9dc9,
e27bbd0ac1, f3371ac1e3, 9cc9f9d757, c8d5c53db1, fc4326233d, 07f7a05c4b, 9b2e41d255, 6fad862aa5, c425878f21, 1fb592afb2,
684f3dadfc, 76e6070012, cb72837866, 2432ef9de3, 026fd1822a, b42a52ef24, 10b031ab36, d63b825ae5, b66da48106, 6f24afc600,
a80dcb03c4, bebbed856f, 2d5882c257, 37fb140563, aebe7e200f, 6f1e4bcec6, 2059549dce, 8361dc6838, 73ccae9de3, c58e0c62a4,
a4b4e65011, 18f52772b3, fef25a2d06, bf578da16a, 41330d5517, a703fde7b2, c5ee5cd8df, 312540456a, 7ddbaf477a, 9a0f412fff,
7cd3ef09cb, f814c6b89d, 74024a4997, cb7f5dd44d, 9cf2db84a9, ec307d561f, 058bad95cd, 7469633e3d, 278e4cb4c2, e19305dee2,
673eee209d, 91399a0c68, a4480893cb, c410486e30, 5cda8b1b03, 0ac7211ff7, 670a758bee, bdc98c6fa8, f0251d14bb, 61f1a27377,
c39181e5bd, 45dc2c4535, a192441f68, 744bb9b11d, f3adc86693, 714fac0637, eaf6600299, 401be04b4d, a9a2131ebf, 7dd8094760,
a3d3363a01, 75d0f57f09, 8c92b92868, a33027b4db, c1797de3da, 3d49081f6d, 8c318d96f4, 9b7337f67f, 0b5adf104a, ec699c48bf,
c9b45492a5, b0b90679a4, 02cef105a6, 407ca4b244, 62e44e5e35, 03fce6f5cf, 6155fca4b1, 22d4af48c7, 17d5a7143f, 011db5ad5e,
89682f0397, 9ed9174b6f, 53ae8926fa, 6c743ba230, 9852c36a98, 5f85cafc7e, 0591e6e81e, a296892aaf, 45958284c9, 1d6980faf3,
f793f01059, 7deffa8ee2, 84dfc85328, b79aef5fad, 57d9cc59b5, e599700d72, 2761403735, e57033c4a1, ca049e4b5d, 2d48648f52,
40297e0c6a, 75f0cd47b8, c76313737d, a2b1890e7e, df705b97b7, df280b7db7, 67dff60a7d, 08fb4d801f, ac8c87be9e, 0460a252e2,
ebeb68fd9b, a9f550fb79, 4181c0b20d, 765e9884dd, ea9411fa69, 12956d7633, c1627bb98c, 4e374aa9db, ef003bdad6
1  .dockerignore  Normal file
@@ -0,0 +1 @@
.env
28  .github/workflows/stale.yml  vendored
@@ -1,28 +0,0 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '27 21 * * *'

jobs:
  stale:

    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 40
          stale-issue-message: 'Stale issue message'
          stale-pr-message: 'Stale pull request message'
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'
46  .github/workflows/test.yml  vendored  Normal file
@@ -0,0 +1,46 @@
name: Testing

on: [pull_request]

jobs:
  unit-test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        node-version: [20.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install dependencies
        run: npm install
      - name: Run Unit Tests
        run: npm run test:unit
  e2e-test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        node-version: [20.x]
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install git
        run: |
          sudo apt-get update
          sudo apt-get install -y git
          git --version
      - name: Setup git
        run: |
          git config --global user.email "test@example.com"
          git config --global user.name "Test User"
      - name: Install dependencies
        run: npm install
      - name: Build
        run: npm run build
      - name: Run E2E Tests
        run: npm run test:e2e
3  .gitignore  vendored
@@ -10,4 +10,5 @@ uncaughtExceptions.log
.vscode
src/*.json
.idea
test.ts
test.ts
notes.md
1  .npmignore  Normal file
@@ -0,0 +1 @@
out/github-action.cjs
139  README.md
@@ -2,13 +2,12 @@
|
||||
<div>
|
||||
<img src=".github/logo-grad.svg" alt="OpenCommit logo"/>
|
||||
<h1 align="center">OpenCommit</h1>
|
||||
<h4 align="center">Follow the bird <a href="https://twitter.com/io_Y_oi"><img src="https://img.shields.io/twitter/follow/io_Y_oi?style=flat&label=io_Y_oi&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
|
||||
<h4 align="center">Author <a href="https://twitter.com/_sukharev_"><img src="https://img.shields.io/twitter/follow/_sukharev_?style=flat&label=_sukharev_&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
|
||||
</div>
|
||||
<h2>Auto-generate meaningful commits in 1 second</h2>
|
||||
<h2>Auto-generate meaningful commits in a second</h2>
|
||||
<p>Killing lame commits with AI 🤯🔫</p>
|
||||
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
|
||||
<h4 align="center">🪩 Winner of GitHub 2023 HACKATHON <a href="https://twitter.com/io_Y_oi/status/1683448136973582336"><img style="width:14px; height:14px; margin-top: -4px" src=".github/github-mark-white.png" align="center"></a>
|
||||
</h4>
|
||||
<h4 align="center">🪩 Winner of <a href="https://twitter.com/_sukharev_/status/1683448136973582336">GitHub 2023 hackathon</a> 🪩</h4>
|
||||
</div>
|
||||
|
||||
---
|
||||
@@ -17,7 +16,7 @@
|
||||
<img src=".github/opencommit-example.png" alt="OpenCommit example"/>
|
||||
</div>
|
||||
|
||||
All the commits in this repo are authored by OpenCommit — look at [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emojis and long commit descriptions are configurable.
|
||||
All the commits in this repo are authored by OpenCommit — look at [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emojis and long commit descriptions are configurable, basically everything is.
|
||||
|
||||
## Setup OpenCommit as a CLI tool
|
||||
|
||||
@@ -29,6 +28,10 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
|
||||
npm install -g opencommit
|
||||
```
|
||||
|
||||
Alternatively run it via `npx opencommit` or `bunx opencommit`
|
||||
|
||||
MacOS may ask to run the command with `sudo` when installing a package globally.
|
||||
|
||||
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
|
||||
|
||||
3. Set the key to OpenCommit config:
|
||||
@@ -55,6 +58,62 @@ git add <files...>
|
||||
oco
|
||||
```
|
||||
|
||||
Running `git add` is optional; `oco` will do it for you.
|
||||
|
||||
### Running locally with Ollama
|
||||
|
||||
You can also run it with a local model through Ollama:
|
||||
|
||||
- install and start ollama
|
||||
- run `ollama run mistral` (do this only once, to pull model)
|
||||
- run (in your project directory):
|
||||
|
||||
```sh
|
||||
git add <files...>
|
||||
oco config set OCO_AI_PROVIDER='ollama'
|
||||
```
|
||||
|
||||
If you want to use a model other than `mistral` (the default), you can do so by setting the `OCO_MODEL` config as follows:
|
||||
|
||||
```sh
|
||||
oco config set OCO_AI_PROVIDER='ollama'
|
||||
oco config set OCO_MODEL='llama3:8b'
|
||||
```
|
||||
|
||||
If you have Ollama set up in Docker or on another machine with GPUs (not locally), you can change the default endpoint URL.
|
||||
|
||||
You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
|
||||
|
||||
```sh
|
||||
oco config set OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat'
|
||||
```
|
||||
|
||||
where 192.168.1.10 is an example of the endpoint URL where you have Ollama set up.
|
||||
|
||||
### Flags
|
||||
|
||||
There are multiple optional flags that can be used with the `oco` command:
|
||||
|
||||
#### Use Full GitMoji Specification
|
||||
|
||||
Link to the GitMoji specification: https://gitmoji.dev/
|
||||
|
||||
This flag can only be used if the `OCO_EMOJI` configuration item is set to `true`. It allows users to use all emojis in the GitMoji specification. By default, the full GitMoji specification is set to `false`, which only includes 10 emojis (🐛✨📝🚀✅♻️⬆️🔧🌐💡).
|
||||
|
||||
This is done to limit the number of tokens sent in each request. However, if you would like to use the full GitMoji specification, you can use the `--fgm` flag.
|
||||
|
||||
```
|
||||
oco --fgm
|
||||
```
|
||||
|
||||
#### Skip Commit Confirmation
|
||||
|
||||
This flag allows users to automatically commit the changes without having to manually confirm the commit message. This is useful for users who want to streamline the commit process and avoid additional steps. To use this flag, you can run the following command:
|
||||
|
||||
```
|
||||
oco --yes
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Local per repo configuration
|
||||
@@ -62,17 +121,24 @@ oco
|
||||
Create a `.env` file and add OpenCommit config variables there like this:
|
||||
|
||||
```env
|
||||
...
|
||||
OCO_OPENAI_API_KEY=<your OpenAI API token>
|
||||
OCO_OPENAI_MAX_TOKENS=<max response tokens from OpenAI API>
|
||||
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
|
||||
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
|
||||
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
|
||||
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
|
||||
OCO_EMOJI=<boolean, add GitMoji>
|
||||
OCO_MODEL=<either 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'>
|
||||
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
|
||||
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
|
||||
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
|
||||
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
|
||||
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama>
|
||||
...
|
||||
```
|
||||
|
||||
These are not all the config options, but you get the point.
|
||||
|
||||
### Global config for all repos
|
||||
|
||||
Local config still has more priority than Global config, but you may set `OCO_MODEL` and `OCO_LOCALE` globally and set local configs for `OCO_EMOJI` and `OCO_DESCRIPTION` per repo which is more convenient.
|
||||
@@ -80,7 +146,7 @@ Local config still has more priority than Global config, but you may set `OCO_MO
|
||||
Simply set any of the variables above like this:
|
||||
|
||||
```sh
|
||||
oco config set OCO_MODEL=gpt-4
|
||||
oco config set OCO_MODEL=gpt-4o-mini
|
||||
```
|
||||
|
||||
Configure [GitMoji](https://gitmoji.dev/) to preface a message.
|
||||
@@ -95,14 +161,26 @@ To remove preface emojis:
|
||||
oco config set OCO_EMOJI=false
|
||||
```
|
||||
|
||||
### Switch to GPT-4 or other models
|
||||
Other config options behave the same way.
|
||||
|
||||
By default, OpenCommit uses `gpt-3.5-turbo-16k` model.
|
||||
### Output WHY the changes were done (WIP)
|
||||
|
||||
You may switch to GPT-4 which performs better, but costs ~x15 times more 🤠
|
||||
You can set the `OCO_WHY` config to `true` to have OpenCommit output a short description of WHY the changes were done after the commit message. Default is `false`.
|
||||
|
||||
To make this accurate, we must store 'what files do' in some kind of index or embedding and perform a lookup (RAG-style) to produce an accurate git commit message. If you feel like building this, comment on this ticket https://github.com/di-sukharev/opencommit/issues/398 and let's go from there together.
|
||||
|
||||
```sh
|
||||
oco config set OCO_MODEL=gpt-4
|
||||
oco config set OCO_WHY=true
|
||||
```
|
||||
|
||||
### Switch to GPT-4 or other models
|
||||
|
||||
By default, OpenCommit uses `gpt-4o-mini` model.
|
||||
|
||||
You may switch to gpt-4o which performs better, but costs more 🤠
|
||||
|
||||
```sh
|
||||
oco config set OCO_MODEL=gpt-4o
|
||||
```
|
||||
|
||||
or, as a cheaper option:
|
||||
@@ -111,7 +189,19 @@ or for as a cheaper option:
|
||||
oco config set OCO_MODEL=gpt-3.5-turbo
|
||||
```
|
||||
|
||||
Make sure that you spell it `gpt-4` (lowercase) and that you have API access to the 4th model. Even if you have ChatGPT+, that doesn't necessarily mean that you have API access to GPT-4.
|
||||
### Switch to Azure OpenAI
|
||||
|
||||
By default OpenCommit uses [OpenAI](https://openai.com).
|
||||
|
||||
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀
|
||||
|
||||
```sh
|
||||
opencommit config set OCO_AI_PROVIDER=azure
|
||||
```
|
||||
|
||||
Of course you need to set `OCO_OPENAI_API_KEY`. You also need to set `OPENAI_BASE_PATH` to your endpoint and set the deployment name as the `model`.
|
||||
|
||||
### Locale configuration
|
||||
|
||||
@@ -132,6 +222,14 @@ oco config set OCO_LANGUAGE=française
|
||||
The default language setting is **English**
|
||||
All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder
|
||||
|
||||
### Push to git (gonna be deprecated)
|
||||
|
||||
A prompt for pushing to git is on by default, but if you would like to turn it off, just use:
|
||||
|
||||
```sh
|
||||
oco config set OCO_GITPUSH=false
|
||||
```
|
||||
|
||||
### Switch to `@commitlint`
|
||||
|
||||
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages that respect your local commitlint config.
|
||||
@@ -146,7 +244,7 @@ Replace `<module>` with either `conventional-commit` or `@commitlint`.
|
||||
|
||||
#### Example:
|
||||
|
||||
To switch to using th` '@commitlint` prompt module, run:
|
||||
To switch to using the `@commitlint` prompt module, run:
|
||||
|
||||
```sh
|
||||
oco config set OCO_PROMPT_MODULE=@commitlint
|
||||
@@ -214,7 +312,7 @@ In our codebase, the implementation of this feature can be found in the followin
|
||||
|
||||
```javascript
|
||||
commitMessage = messageTemplate.replace(
|
||||
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
config.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
commitMessage
|
||||
);
|
||||
```
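
For illustration, here is a small self-contained sketch of that substitution with a hypothetical template value (the `GH-123: $msg` string is invented for the example; only the placeholder logic mirrors the snippet above):

```ts
// Hypothetical values, mirroring the replace() call shown above.
const OCO_MESSAGE_TEMPLATE_PLACEHOLDER = '$msg';
const messageTemplate = 'GH-123: $msg'; // e.g. passed as an extra argument: oco 'GH-123: $msg'
const generatedMessage = 'fix(server): prevent crash on empty payload';

const commitMessage = messageTemplate.replace(
  OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
  generatedMessage
);
// commitMessage === 'GH-123: fix(server): prevent crash on empty payload'
```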
|
||||
@@ -271,7 +369,7 @@ Or follow the process of your IDE Source Control feature, when it calls `git com
|
||||
|
||||
OpenCommit is now available as a GitHub Action which automatically improves all new commits messages when you push to remote!
|
||||
|
||||
This is great if you want to make sure all of the commits in all of your repository branches are meaningful and not lame like `fix1` or `done2`.
|
||||
This is great if you want to make sure all commits in all of your repository branches are meaningful and not lame like `fix1` or `done2`.
|
||||
|
||||
Create a file `.github/workflows/opencommit.yml` with the contents below:
|
||||
|
||||
@@ -309,11 +407,12 @@ jobs:
|
||||
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
|
||||
|
||||
# customization
|
||||
OCO_OPENAI_MAX_TOKENS: 500
|
||||
OCO_TOKENS_MAX_INPUT: 4096
|
||||
OCO_TOKENS_MAX_OUTPUT: 500
|
||||
OCO_OPENAI_BASE_PATH: ''
|
||||
OCO_DESCRIPTION: false
|
||||
OCO_EMOJI: false
|
||||
OCO_MODEL: gpt-3.5-turbo-16k
|
||||
OCO_MODEL: gpt-4o
|
||||
OCO_LANGUAGE: en
|
||||
OCO_PROMPT_MODULE: conventional-commit
|
||||
```
|
||||
@@ -330,4 +429,6 @@ You pay for your requests to OpenAI API on your own.
|
||||
|
||||
OpenCommit stores your key locally.
|
||||
|
||||
OpenCommit by default uses ChatGPT (3.5-turbo-16k) official model, which is a lot cheaper than gpt-4.
|
||||
OpenCommit uses the 3.5-turbo model by default; it should not exceed $0.10 per casual working day.
|
||||
|
||||
You may switch to gpt-4; it's better, but more expensive.
|
||||
|
||||
28  jest.config.ts  Normal file
@@ -0,0 +1,28 @@
/**
 * For a detailed explanation regarding each configuration property, visit:
 * https://jestjs.io/docs/configuration
 */

import type { Config } from 'jest';

const config: Config = {
  testTimeout: 100_000,
  coverageProvider: 'v8',
  moduleDirectories: ['node_modules', 'src'],
  preset: 'ts-jest/presets/js-with-ts-esm',
  setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
  testEnvironment: 'node',
  testRegex: ['.*\\.test\\.ts$'],
  transformIgnorePatterns: ['node_modules/(?!cli-testing-library)'],
  transform: {
    '^.+\\.(ts|tsx)$': [
      'ts-jest',
      {
        diagnostics: false,
        useESM: true
      }
    ]
  }
};

export default config;
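
To show what this configuration picks up: any `*.test.ts` file matches `testRegex` above and runs through ts-jest in ESM mode. A minimal illustrative test; the file name and the inline helper are hypothetical and not part of the repo:

```ts
// test/unit/example.test.ts (illustration only)
const normalizeCommitMessage = (msg: string): string => msg.trim().replace(/\s+/g, ' ');

describe('example test matched by testRegex above', () => {
  it('normalizes whitespace in a commit message', () => {
    expect(normalizeCommitMessage('  fix:   typo ')).toBe('fix: typo');
  });
});
```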
40747  out/cli.cjs  (file diff suppressed because one or more lines are too long)
54745  out/github-action.cjs  (file diff suppressed because one or more lines are too long)
Binary file not shown.
6834  package-lock.json  (generated; file diff suppressed because it is too large)
33  package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "opencommit",
|
||||
"version": "3.0.0",
|
||||
"version": "3.1.2",
|
||||
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
|
||||
"keywords": [
|
||||
"git",
|
||||
@@ -12,7 +12,8 @@
|
||||
"aicommit",
|
||||
"aicommits",
|
||||
"gptcommit",
|
||||
"commit"
|
||||
"commit",
|
||||
"ollama"
|
||||
],
|
||||
"main": "cli.js",
|
||||
"bin": {
|
||||
@@ -26,7 +27,8 @@
|
||||
"author": "https://github.com/di-sukharev",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"out/**/*"
|
||||
"out/cli.cjs",
|
||||
"out/tiktoken_bg.wasm"
|
||||
],
|
||||
"release": {
|
||||
"branches": [
|
||||
@@ -39,24 +41,39 @@
|
||||
"scripts": {
|
||||
"watch": "npm run -S build -- --sourcemap --watch",
|
||||
"start": "node ./out/cli.cjs",
|
||||
"ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
|
||||
"dev": "ts-node ./src/cli.ts",
|
||||
"dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
|
||||
"build": "rimraf out && node esbuild.config.js",
|
||||
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
|
||||
"deploy": "npm run build:push && npm version patch && git push --follow-tags && npm publish --tag latest",
|
||||
"deploy": "npm run build:push && git push --tags && npm publish --tag latest",
|
||||
"deploy:patch": "npm version patch && npm run deploy",
|
||||
"lint": "eslint src --ext ts && tsc --noEmit",
|
||||
"format": "prettier --write src"
|
||||
"format": "prettier --write src",
|
||||
"test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
|
||||
"test:all": "npm run test:unit:docker && npm run test:e2e:docker",
|
||||
"test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
|
||||
"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest test/unit",
|
||||
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
|
||||
"test:e2e": "npm run test:e2e:setup && jest test/e2e",
|
||||
"test:e2e:setup": "sh test/e2e/setup.sh",
|
||||
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/types": "^17.4.4",
|
||||
"@types/ini": "^1.3.31",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/node": "^16.18.14",
|
||||
"@typescript-eslint/eslint-plugin": "^5.45.0",
|
||||
"@typescript-eslint/parser": "^5.45.0",
|
||||
"cli-testing-library": "^2.0.2",
|
||||
"dotenv": "^16.0.3",
|
||||
"esbuild": "^0.15.18",
|
||||
"eslint": "^8.28.0",
|
||||
"jest": "^29.7.0",
|
||||
"prettier": "^2.8.4",
|
||||
"ts-jest": "^29.1.2",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^4.9.3"
|
||||
},
|
||||
@@ -64,10 +81,14 @@
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/github": "^5.1.1",
|
||||
"@anthropic-ai/sdk": "^0.19.2",
|
||||
"@azure/openai": "^1.0.0-beta.12",
|
||||
"@clack/prompts": "^0.6.1",
|
||||
"@dqbd/tiktoken": "^1.0.2",
|
||||
"@google/generative-ai": "^0.11.4",
|
||||
"@octokit/webhooks-schemas": "^6.11.0",
|
||||
"@octokit/webhooks-types": "^6.11.0",
|
||||
"ai": "^2.2.14",
|
||||
"axios": "^1.3.4",
|
||||
"chalk": "^5.2.0",
|
||||
"cleye": "^1.3.2",
|
||||
@@ -76,6 +97,6 @@
|
||||
"ignore": "^5.2.4",
|
||||
"ini": "^3.0.1",
|
||||
"inquirer": "^9.1.4",
|
||||
"openai": "^3.2.1"
|
||||
"openai": "^4.56.0"
|
||||
}
|
||||
}
|
||||
|
||||
115  src/api.ts
@@ -1,115 +0,0 @@
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { execa } from 'execa';
|
||||
import {
|
||||
ChatCompletionRequestMessage,
|
||||
Configuration as OpenAiApiConfiguration,
|
||||
OpenAIApi
|
||||
} from 'openai';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import {
|
||||
CONFIG_MODES,
|
||||
DEFAULT_MODEL_TOKEN_LIMIT,
|
||||
getConfig
|
||||
} from './commands/config';
|
||||
import { GenerateCommitMessageErrorEnum } from './generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
|
||||
let basePath = config?.OCO_OPENAI_BASE_PATH;
|
||||
let apiKey = config?.OCO_OPENAI_API_KEY;
|
||||
|
||||
const [command, mode] = process.argv.slice(2);
|
||||
|
||||
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
|
||||
intro('opencommit');
|
||||
|
||||
outro(
|
||||
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
|
||||
);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
|
||||
|
||||
class OpenAi {
|
||||
private openAiApiConfiguration = new OpenAiApiConfiguration({
|
||||
apiKey: apiKey
|
||||
});
|
||||
private openAI!: OpenAIApi;
|
||||
|
||||
constructor() {
|
||||
if (basePath) {
|
||||
this.openAiApiConfiguration.basePath = basePath;
|
||||
}
|
||||
this.openAI = new OpenAIApi(this.openAiApiConfiguration);
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<ChatCompletionRequestMessage>
|
||||
): Promise<string | undefined> => {
|
||||
const params = {
|
||||
model: MODEL,
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: maxTokens || 500
|
||||
};
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (REQUEST_TOKENS > DEFAULT_MODEL_TOKEN_LIMIT - maxTokens) {
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
}
|
||||
|
||||
const { data } = await this.openAI.createChatCompletion(params);
|
||||
|
||||
const message = data.choices[0].message;
|
||||
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError?.message) outro(openAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const getOpenCommitLatestVersion = async (): Promise<
|
||||
string | undefined
|
||||
> => {
|
||||
try {
|
||||
const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
|
||||
return stdout;
|
||||
} catch (_) {
|
||||
outro('Error while getting the latest version of opencommit');
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const api = new OpenAi();
|
||||
14  src/cli.ts
@@ -17,17 +17,25 @@ cli(
    version: packageJSON.version,
    name: 'opencommit',
    commands: [configCommand, hookCommand, commitlintConfigCommand],
    flags: {},
    flags: {
      fgm: Boolean,
      yes: {
        type: Boolean,
        alias: 'y',
        description: 'Skip commit confirmation prompt',
        default: false
      }
    },
    ignoreArgv: (type) => type === 'unknown-flag' || type === 'argument',
    help: { description: packageJSON.description }
  },
  async () => {
  async ({ flags }) => {
    await checkIsLatestVersion();

    if (await isHookCalled()) {
      prepareCommitMessageHook();
    } else {
      commit(extraArgs);
      commit(extraArgs, false, flags.fgm, flags.yes);
    }
  },
  extraArgs
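
Read together with the updated `commit` signature further down in this diff, the two new flags map onto its third and fourth parameters. A minimal sketch (the import path is an assumption based on the repo layout):

```ts
// Sketch only; the import path is an assumption based on the repo layout.
import { commit } from './commands/commit';

// With `oco --fgm --yes`, cleye parses the flags roughly as:
const flags = { fgm: true, yes: true };
const extraArgs: string[] = [];

// The callback above forwards them as the 3rd and 4th arguments:
// commit(extraArgs, isStageAllFlag, fullGitMojiSpec, skipCommitConfirmation)
await commit(extraArgs, false, flags.fgm, flags.yes);
```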
5  src/commands/ENUMS.ts  Normal file
@@ -0,0 +1,5 @@
export enum COMMANDS {
  config = 'config',
  hook = 'hook',
  commitlint = 'commitlint'
}
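
The enum replaces the old `../CommandsEnum` import; the config and hook command modules in this diff now import it from `./ENUMS`. A tiny illustrative sketch (the `isKnownCommand` helper is not part of the codebase):

```ts
import { COMMANDS } from './ENUMS';

// Illustrative helper, not part of the codebase:
const isKnownCommand = (name: string): boolean =>
  Object.values(COMMANDS).includes(name as COMMANDS);

isKnownCommand('config');     // true
isKnownCommand('commitlint'); // true
isKnownCommand('push');       // false
```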
@@ -1,6 +1,3 @@
|
||||
import chalk from 'chalk';
|
||||
import { execa } from 'execa';
|
||||
|
||||
import {
|
||||
confirm,
|
||||
intro,
|
||||
@@ -10,7 +7,8 @@ import {
|
||||
select,
|
||||
spinner
|
||||
} from '@clack/prompts';
|
||||
|
||||
import chalk from 'chalk';
|
||||
import { execa } from 'execa';
|
||||
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
|
||||
import {
|
||||
assertGitRepo,
|
||||
@@ -32,35 +30,50 @@ const getGitRemotes = async () => {
|
||||
// Check for the presence of message templates
|
||||
const checkMessageTemplate = (extraArgs: string[]): string | false => {
|
||||
for (const key in extraArgs) {
|
||||
if (extraArgs[key].includes(config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER))
|
||||
if (extraArgs[key].includes(config.OCO_MESSAGE_TEMPLATE_PLACEHOLDER))
|
||||
return extraArgs[key];
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const generateCommitMessageFromGitDiff = async (
|
||||
diff: string,
|
||||
extraArgs: string[]
|
||||
): Promise<void> => {
|
||||
interface GenerateCommitMessageFromGitDiffParams {
|
||||
diff: string;
|
||||
extraArgs: string[];
|
||||
fullGitMojiSpec?: boolean;
|
||||
skipCommitConfirmation?: boolean;
|
||||
}
|
||||
|
||||
const generateCommitMessageFromGitDiff = async ({
|
||||
diff,
|
||||
extraArgs,
|
||||
fullGitMojiSpec = false,
|
||||
skipCommitConfirmation = false
|
||||
}: GenerateCommitMessageFromGitDiffParams): Promise<void> => {
|
||||
await assertGitRepo();
|
||||
const commitSpinner = spinner();
|
||||
commitSpinner.start('Generating the commit message');
|
||||
const commitGenerationSpinner = spinner();
|
||||
commitGenerationSpinner.start('Generating the commit message');
|
||||
|
||||
try {
|
||||
let commitMessage = await generateCommitMessageByDiff(diff);
|
||||
let commitMessage = await generateCommitMessageByDiff(
|
||||
diff,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
const messageTemplate = checkMessageTemplate(extraArgs);
|
||||
if (
|
||||
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER &&
|
||||
config.OCO_MESSAGE_TEMPLATE_PLACEHOLDER &&
|
||||
typeof messageTemplate === 'string'
|
||||
) {
|
||||
const messageTemplateIndex = extraArgs.indexOf(messageTemplate);
|
||||
extraArgs.splice(messageTemplateIndex, 1);
|
||||
|
||||
commitMessage = messageTemplate.replace(
|
||||
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
config.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
commitMessage
|
||||
);
|
||||
}
|
||||
|
||||
commitSpinner.stop('📝 Commit message generated');
|
||||
commitGenerationSpinner.stop('📝 Commit message generated');
|
||||
|
||||
outro(
|
||||
`Generated commit message:
|
||||
@@ -69,19 +82,26 @@ ${commitMessage}
|
||||
${chalk.grey('——————————————————')}`
|
||||
);
|
||||
|
||||
const isCommitConfirmedByUser = await confirm({
|
||||
message: 'Confirm the commit message?'
|
||||
});
|
||||
const isCommitConfirmedByUser =
|
||||
skipCommitConfirmation ||
|
||||
(await confirm({
|
||||
message: 'Confirm the commit message?'
|
||||
}));
|
||||
|
||||
if (isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
|
||||
if (isCancel(isCommitConfirmedByUser)) process.exit(1);
|
||||
|
||||
if (isCommitConfirmedByUser) {
|
||||
const committingChangesSpinner = spinner();
|
||||
committingChangesSpinner.start('Committing the changes');
|
||||
const { stdout } = await execa('git', [
|
||||
'commit',
|
||||
'-m',
|
||||
commitMessage,
|
||||
...extraArgs
|
||||
]);
|
||||
|
||||
outro(`${chalk.green('✔')} Successfully committed`);
|
||||
committingChangesSpinner.stop(
|
||||
`${chalk.green('✔')} Successfully committed`
|
||||
);
|
||||
|
||||
outro(stdout);
|
||||
|
||||
@@ -93,12 +113,14 @@ ${chalk.grey('——————————————————')}`
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (remotes.length === 1) {
|
||||
if (remotes.length === 1 && config.OCO_GITPUSH !== true) {
|
||||
const isPushConfirmedByUser = await confirm({
|
||||
message: 'Do you want to run `git push`?'
|
||||
});
|
||||
|
||||
if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
|
||||
if (isCancel(isPushConfirmedByUser)) process.exit(1);
|
||||
|
||||
if (isPushConfirmedByUser) {
|
||||
const pushSpinner = spinner();
|
||||
|
||||
pushSpinner.start(`Running 'git push ${remotes[0]}'`);
|
||||
@@ -126,25 +148,39 @@ ${chalk.grey('——————————————————')}`
|
||||
options: remotes.map((remote) => ({ value: remote, label: remote }))
|
||||
})) as string;
|
||||
|
||||
if (!isCancel(selectedRemote)) {
|
||||
const pushSpinner = spinner();
|
||||
if (isCancel(selectedRemote)) process.exit(1);
|
||||
|
||||
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
|
||||
const pushSpinner = spinner();
|
||||
|
||||
const { stdout } = await execa('git', ['push', selectedRemote]);
|
||||
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
|
||||
|
||||
pushSpinner.stop(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
const { stdout } = await execa('git', ['push', selectedRemote]);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
} else outro(`${chalk.gray('✖')} process cancelled`);
|
||||
pushSpinner.stop(
|
||||
`${chalk.green(
|
||||
'✔'
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
}
|
||||
} else {
|
||||
const regenerateMessage = await confirm({
|
||||
message: 'Do you want to regenerate the message?'
|
||||
});
|
||||
|
||||
if (isCancel(regenerateMessage)) process.exit(1);
|
||||
|
||||
if (regenerateMessage) {
|
||||
await generateCommitMessageFromGitDiff({
|
||||
diff,
|
||||
extraArgs,
|
||||
fullGitMojiSpec
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
commitSpinner.stop('📝 Commit message generated');
|
||||
commitGenerationSpinner.stop('📝 Commit message generated');
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
@@ -154,7 +190,9 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
export async function commit(
|
||||
extraArgs: string[] = [],
|
||||
isStageAllFlag: Boolean = false
|
||||
isStageAllFlag: Boolean = false,
|
||||
fullGitMojiSpec: boolean = false,
|
||||
skipCommitConfirmation: boolean = false
|
||||
) {
|
||||
if (isStageAllFlag) {
|
||||
const changedFiles = await getChangedFiles();
|
||||
@@ -190,11 +228,10 @@ export async function commit(
|
||||
message: 'Do you want to stage all files and generate commit message?'
|
||||
});
|
||||
|
||||
if (
|
||||
isStageAllAndCommitConfirmedByUser &&
|
||||
!isCancel(isStageAllAndCommitConfirmedByUser)
|
||||
) {
|
||||
await commit(extraArgs, true);
|
||||
if (isCancel(isStageAllAndCommitConfirmedByUser)) process.exit(1);
|
||||
|
||||
if (isStageAllAndCommitConfirmedByUser) {
|
||||
await commit(extraArgs, true, fullGitMojiSpec);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -212,7 +249,7 @@ export async function commit(
|
||||
await gitAdd({ files });
|
||||
}
|
||||
|
||||
await commit(extraArgs, false);
|
||||
await commit(extraArgs, false, fullGitMojiSpec);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -223,10 +260,12 @@ export async function commit(
|
||||
);
|
||||
|
||||
const [, generateCommitError] = await trytm(
|
||||
generateCommitMessageFromGitDiff(
|
||||
await getDiff({ files: stagedFiles }),
|
||||
extraArgs
|
||||
)
|
||||
generateCommitMessageFromGitDiff({
|
||||
diff: await getDiff({ files: stagedFiles }),
|
||||
extraArgs,
|
||||
fullGitMojiSpec,
|
||||
skipCommitConfirmation
|
||||
})
|
||||
);
|
||||
|
||||
if (generateCommitError) {
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import { COMMANDS } from '../CommandsEnum';
|
||||
import { configureCommitlintIntegration } from '../modules/commitlint/config';
|
||||
import { getCommitlintLLMConfig } from '../modules/commitlint/utils';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
get = 'get',
|
||||
@@ -25,7 +23,7 @@ export const commitlintConfigCommand = command(
|
||||
if (mode === CONFIG_MODES.get) {
|
||||
const commitLintConfig = await getCommitlintLLMConfig();
|
||||
|
||||
outro(commitLintConfig.toString());
|
||||
outro(JSON.stringify(commitLintConfig, null, 2));
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -1,45 +1,121 @@
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
import * as dotenv from 'dotenv';
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { parse as iniParse, stringify as iniStringify } from 'ini';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import { COMMANDS } from '../CommandsEnum';
|
||||
import { getI18nLocal } from '../i18n';
|
||||
|
||||
dotenv.config();
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
import { TEST_MOCK_TYPES } from '../engine/testAi';
|
||||
import { getI18nLocal, i18n } from '../i18n';
|
||||
|
||||
export enum CONFIG_KEYS {
|
||||
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
|
||||
OCO_OPENAI_MAX_TOKENS = 'OCO_OPENAI_MAX_TOKENS',
|
||||
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
|
||||
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
|
||||
OCO_GEMINI_API_KEY = 'OCO_GEMINI_API_KEY',
|
||||
OCO_GEMINI_BASE_PATH = 'OCO_GEMINI_BASE_PATH',
|
||||
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
|
||||
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
|
||||
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
|
||||
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
|
||||
OCO_EMOJI = 'OCO_EMOJI',
|
||||
OCO_MODEL = 'OCO_MODEL',
|
||||
OCO_LANGUAGE = 'OCO_LANGUAGE',
|
||||
OCO_WHY = 'OCO_WHY',
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
|
||||
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE'
|
||||
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
|
||||
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
|
||||
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
|
||||
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
|
||||
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
|
||||
OCO_API_URL = 'OCO_API_URL',
|
||||
OCO_OLLAMA_API_URL = 'OCO_OLLAMA_API_URL',
|
||||
OCO_FLOWISE_ENDPOINT = 'OCO_FLOWISE_ENDPOINT',
|
||||
OCO_FLOWISE_API_KEY = 'OCO_FLOWISE_API_KEY'
|
||||
}
|
||||
|
||||
export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
get = 'get',
|
||||
set = 'set'
|
||||
}
|
||||
|
||||
export const MODEL_LIST = {
|
||||
openai: [
|
||||
'gpt-4o-mini',
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-instruct',
|
||||
'gpt-3.5-turbo-0613',
|
||||
'gpt-3.5-turbo-0301',
|
||||
'gpt-3.5-turbo-1106',
|
||||
'gpt-3.5-turbo-0125',
|
||||
'gpt-3.5-turbo-16k',
|
||||
'gpt-3.5-turbo-16k-0613',
|
||||
'gpt-3.5-turbo-16k-0301',
|
||||
'gpt-4',
|
||||
'gpt-4-0314',
|
||||
'gpt-4-0613',
|
||||
'gpt-4-1106-preview',
|
||||
'gpt-4-0125-preview',
|
||||
'gpt-4-turbo-preview',
|
||||
'gpt-4-vision-preview',
|
||||
'gpt-4-1106-vision-preview',
|
||||
'gpt-4-turbo',
|
||||
'gpt-4-turbo-2024-04-09',
|
||||
'gpt-4-32k',
|
||||
'gpt-4-32k-0314',
|
||||
'gpt-4-32k-0613',
|
||||
'gpt-4o',
|
||||
'gpt-4o-2024-05-13',
|
||||
'gpt-4o-mini-2024-07-18'
|
||||
],
|
||||
|
||||
anthropic: [
|
||||
'claude-3-5-sonnet-20240620',
|
||||
'claude-3-opus-20240229',
|
||||
'claude-3-sonnet-20240229',
|
||||
'claude-3-haiku-20240307'
|
||||
],
|
||||
|
||||
gemini: [
|
||||
'gemini-1.5-flash',
|
||||
'gemini-1.5-pro',
|
||||
'gemini-1.0-pro',
|
||||
'gemini-pro-vision',
|
||||
'text-embedding-004'
|
||||
]
|
||||
};
|
||||
|
||||
const getDefaultModel = (provider: string | undefined): string => {
|
||||
switch (provider) {
|
||||
case 'ollama':
|
||||
return '';
|
||||
case 'anthropic':
|
||||
return MODEL_LIST.anthropic[0];
|
||||
case 'gemini':
|
||||
return MODEL_LIST.gemini[0];
|
||||
default:
|
||||
return MODEL_LIST.openai[0];
|
||||
}
|
||||
};
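
For reference, what the switch above resolves to for each provider, with values read directly from `MODEL_LIST` as defined above (evaluated as if inside this module, since `getDefaultModel` is not exported):

```ts
getDefaultModel('openai');    // 'gpt-4o-mini'                (MODEL_LIST.openai[0])
getDefaultModel('anthropic'); // 'claude-3-5-sonnet-20240620' (MODEL_LIST.anthropic[0])
getDefaultModel('gemini');    // 'gemini-1.5-flash'           (MODEL_LIST.gemini[0])
getDefaultModel('ollama');    // ''  -> the model must be set explicitly via OCO_MODEL
getDefaultModel(undefined);   // falls through to the OpenAI default
```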
|
||||
|
||||
export enum DEFAULT_TOKEN_LIMITS {
|
||||
DEFAULT_MAX_TOKENS_INPUT = 40960,
|
||||
DEFAULT_MAX_TOKENS_OUTPUT = 4096
|
||||
}
|
||||
|
||||
const validateConfig = (
|
||||
key: string,
|
||||
condition: any,
|
||||
validationMessage: string
|
||||
) => {
|
||||
if (!condition) {
|
||||
outro(`${chalk.red('✖')} wrong value for ${key}: ${validationMessage}.`);
|
||||
|
||||
outro(
|
||||
`${chalk.red('✖')} Unsupported config key ${key}: ${validationMessage}`
|
||||
'For more help refer to docs https://github.com/di-sukharev/opencommit'
|
||||
);
|
||||
|
||||
process.exit(1);
|
||||
@@ -47,17 +123,65 @@ const validateConfig = (
|
||||
};
|
||||
|
||||
export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config?: any) {
|
||||
validateConfig(CONFIG_KEYS.OCO_OPENAI_API_KEY, value, 'Cannot be empty');
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'openai') return value;
|
||||
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_API_KEY,
|
||||
value.startsWith('sk-'),
|
||||
'Must start with "sk-"'
|
||||
'OCO_OPENAI_API_KEY',
|
||||
typeof value === 'string' && value.length > 0,
|
||||
'Empty value is not allowed'
|
||||
);
|
||||
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_API_KEY,
|
||||
config[CONFIG_KEYS.OCO_OPENAI_BASE_PATH] || value.length === 51,
|
||||
'Must be 51 characters long'
|
||||
'OCO_OPENAI_API_KEY',
|
||||
value,
|
||||
'You need to provide the OCO_OPENAI_API_KEY when OCO_AI_PROVIDER is set to "openai" (default). Run `oco config set OCO_OPENAI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'azure') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_AZURE_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_AZURE_API_KEY when OCO_AI_PROVIDER is set to "azure". Run: `oco config set OCO_AZURE_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'gemini') return value;
|
||||
|
||||
validateConfig(
|
||||
'OCO_GEMINI_API_KEY',
|
||||
value || config.OCO_GEMINI_API_KEY || config.OCO_AI_PROVIDER === 'test',
|
||||
'You need to provide the OCO_GEMINI_API_KEY when OCO_AI_PROVIDER is set to "gemini". Run: `oco config set OCO_GEMINI_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
|
||||
if (config.OCO_AI_PROVIDER !== 'anthropic') return value;
|
||||
|
||||
validateConfig(
|
||||
'ANTHROPIC_API_KEY',
|
||||
!!value,
|
||||
'You need to provide the OCO_ANTHROPIC_API_KEY key when OCO_AI_PROVIDER is set to "anthropic". Run: `oco config set OCO_ANTHROPIC_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY](value: any, config: any = {}) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_API_KEY,
|
||||
value || config.OCO_AI_PROVIDER !== 'flowise',
|
||||
'You need to provide the OCO_FLOWISE_API_KEY when OCO_AI_PROVIDER is set to "flowise". Run: `oco config set OCO_FLOWISE_API_KEY=your_key`'
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -67,25 +191,28 @@ export const configValidators = {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_DESCRIPTION,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
'Must be boolean: true or false'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS](value: any) {
|
||||
// If the value is a string, convert it to a number.
|
||||
if (typeof value === 'string') {
|
||||
value = parseInt(value);
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
|
||||
!isNaN(value),
|
||||
'Must be a number'
|
||||
);
|
||||
}
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
|
||||
value = parseInt(value);
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
|
||||
value ? typeof value === 'number' : undefined,
|
||||
CONFIG_KEYS.OCO_TOKENS_MAX_INPUT,
|
||||
!isNaN(value),
|
||||
'Must be a number'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT](value: any) {
|
||||
value = parseInt(value);
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
|
||||
!isNaN(value),
|
||||
'Must be a number'
|
||||
);
|
||||
|
||||
@@ -96,18 +223,21 @@ export const configValidators = {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_EMOJI,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
'Must be boolean: true or false'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_LANGUAGE](value: any) {
|
||||
const supportedLanguages = Object.keys(i18n);
|
||||
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_LANGUAGE,
|
||||
getI18nLocal(value),
|
||||
`${value} is not supported yet`
|
||||
`${value} is not supported yet. Supported languages: ${supportedLanguages}`
|
||||
);
|
||||
|
||||
return getI18nLocal(value);
|
||||
},
|
||||
|
||||
@@ -120,19 +250,19 @@ export const configValidators = {
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_MODEL](value: any) {
|
||||
[CONFIG_KEYS.OCO_MODEL](value: any, config: any = {}) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_MODEL,
|
||||
[
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-4',
|
||||
'gpt-3.5-turbo-16k',
|
||||
'gpt-3.5-turbo-0613'
|
||||
].includes(value),
|
||||
`${value} is not supported yet, use 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'`
|
||||
typeof value === 'string',
|
||||
`${value} is not supported yet, use:\n\n ${[
|
||||
...MODEL_LIST.openai,
|
||||
...MODEL_LIST.anthropic,
|
||||
...MODEL_LIST.gemini
|
||||
].join('\n')}`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
@@ -148,93 +278,297 @@ export const configValidators = {
|
||||
['conventional-commit', '@commitlint'].includes(value),
|
||||
`${value} is not supported yet, use '@commitlint' or 'conventional-commit' (default)`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
// todo: deprecate
|
||||
[CONFIG_KEYS.OCO_GITPUSH](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_GITPUSH,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
|
||||
if (!value) value = 'openai';
|
||||
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_AI_PROVIDER,
|
||||
['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise'].includes(
|
||||
value
|
||||
) || value.startsWith('ollama'),
|
||||
`${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_ONE_LINE_COMMIT,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
|
||||
value.includes('openai.azure.com'),
|
||||
'Must be in format "https://<resource name>.openai.azure.com/"'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_FLOWISE_ENDPOINT,
|
||||
typeof value === 'string' && value.includes(':'),
|
||||
'Value must be string and should include both I.P. and port number' // Considering the possibility of a DNS lookup or an explicitly provided IP, there is no pattern to verify, except a colon before the port number
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_TEST_MOCK_TYPE,
|
||||
TEST_MOCK_TYPES.includes(value),
|
||||
`${value} is not supported yet, use ${TEST_MOCK_TYPES.map(
|
||||
(t) => `'${t}'`
|
||||
).join(', ')}`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OLLAMA_API_URL,
|
||||
typeof value === 'string' && value.startsWith('http'),
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
);
|
||||
return value;
|
||||
}
|
||||
};
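
Each validator receives the candidate value plus the full config, so provider-specific keys are skipped when another provider is active (the early `return value` guards above). A short sketch, as if evaluated next to the object above; the sample URL is only an example:

```ts
const partialConfig = { OCO_AI_PROVIDER: 'ollama' };

// Passes: the value is a string starting with 'http'.
configValidators[CONFIG_KEYS.OCO_OLLAMA_API_URL](
  'http://192.168.1.10:11434/api/chat',
  partialConfig
);

// Skipped entirely: the OpenAI key validator returns early
// because the active provider is not 'openai'.
configValidators[CONFIG_KEYS.OCO_OPENAI_API_KEY](undefined, partialConfig);
```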
|
||||
|
||||
export enum OCO_AI_PROVIDER_ENUM {
|
||||
OLLAMA = 'ollama',
|
||||
OPENAI = 'openai',
|
||||
ANTHROPIC = 'anthropic',
|
||||
GEMINI = 'gemini',
|
||||
AZURE = 'azure',
|
||||
TEST = 'test',
|
||||
FLOWISE = 'flowise'
|
||||
}
|
||||
|
||||
export type ConfigType = {
|
||||
[key in CONFIG_KEYS]?: any;
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_AZURE_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_GEMINI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
|
||||
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
|
||||
[CONFIG_KEYS.OCO_EMOJI]: boolean;
|
||||
[CONFIG_KEYS.OCO_WHY]: boolean;
|
||||
[CONFIG_KEYS.OCO_MODEL]: string;
|
||||
[CONFIG_KEYS.OCO_LANGUAGE]: string;
|
||||
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER]: string;
|
||||
[CONFIG_KEYS.OCO_PROMPT_MODULE]: OCO_PROMPT_MODULE_ENUM;
|
||||
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
|
||||
[CONFIG_KEYS.OCO_GITPUSH]: boolean;
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
|
||||
[CONFIG_KEYS.OCO_AZURE_ENDPOINT]?: string;
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_OLLAMA_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_ENDPOINT]: string;
|
||||
[CONFIG_KEYS.OCO_FLOWISE_API_KEY]?: string;
|
||||
};
|
||||
|
||||
const configPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
|
||||
export const getConfig = (): ConfigType | null => {
|
||||
const configFromEnv = {
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_OPENAI_MAX_TOKENS: process.env.OCO_OPENAI_MAX_TOKENS
|
||||
? Number(process.env.OCO_OPENAI_MAX_TOKENS)
|
||||
: undefined,
|
||||
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
|
||||
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,
|
||||
OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false,
|
||||
OCO_MODEL: process.env.OCO_MODEL || 'gpt-3.5-turbo-16k',
|
||||
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
|
||||
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit'
|
||||
};
|
||||
const assertConfigsAreValid = (config: Record<string, any>) => {
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
if (!value) continue;
|
||||
|
||||
const configExists = existsSync(configPath);
|
||||
if (!configExists) return configFromEnv;
|
||||
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
const config = iniParse(configFile);
|
||||
|
||||
for (const configKey of Object.keys(config)) {
|
||||
if (
|
||||
!config[configKey] ||
|
||||
['null', 'undefined'].includes(config[configKey])
|
||||
) {
|
||||
config[configKey] = undefined;
|
||||
if (typeof value === 'string' && ['null', 'undefined'].includes(value)) {
|
||||
config[key] = undefined;
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const validator = configValidators[configKey as CONFIG_KEYS];
|
||||
const validValue = validator(
|
||||
config[configKey] ?? configFromEnv[configKey as CONFIG_KEYS],
|
||||
config
|
||||
);
|
||||
|
||||
config[configKey] = validValue;
|
||||
try {
|
||||
const validate = configValidators[key as CONFIG_KEYS];
|
||||
validate(value, config);
|
||||
} catch (error) {
|
||||
outro(
|
||||
`'${configKey}' name is invalid, it should be either 'OCO_${configKey.toUpperCase()}' or it doesn't exist.`
|
||||
);
|
||||
outro(`Unknown '${key}' config option or missing validator.`);
|
||||
outro(
|
||||
`Manually fix the '.env' file or global '~/.opencommit' config file.`
|
||||
);
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
enum OCO_PROMPT_MODULE_ENUM {
|
||||
CONVENTIONAL_COMMIT = 'conventional-commit',
|
||||
COMMITLINT = '@commitlint'
|
||||
}
|
||||
|
||||
export const DEFAULT_CONFIG = {
|
||||
OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT,
|
||||
OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT,
|
||||
OCO_DESCRIPTION: false,
|
||||
OCO_EMOJI: false,
|
||||
OCO_MODEL: getDefaultModel('openai'),
|
||||
OCO_LANGUAGE: 'en',
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg',
|
||||
OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT,
|
||||
OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
|
||||
OCO_ONE_LINE_COMMIT: false,
|
||||
OCO_TEST_MOCK_TYPE: 'commit-message',
|
||||
OCO_FLOWISE_ENDPOINT: ':',
|
||||
OCO_WHY: false,
|
||||
OCO_GITPUSH: true // todo: deprecate
|
||||
};
|
||||
|
||||
const initGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
writeFileSync(configPath, iniStringify(DEFAULT_CONFIG), 'utf8');
|
||||
return DEFAULT_CONFIG;
|
||||
};
|
||||
|
||||
const parseEnvVarValue = (value?: any) => {
|
||||
try {
|
||||
return JSON.parse(value);
|
||||
} catch (error) {
|
||||
return value;
|
||||
}
|
||||
};
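
`parseEnvVarValue` is what lets booleans and numbers round-trip through `.env` strings; anything that is not valid JSON stays a string. A few concrete results, as if evaluated inside this module:

```ts
parseEnvVarValue('true');     // true        (boolean)
parseEnvVarValue('4096');     // 4096        (number)
parseEnvVarValue('gpt-4o');   // 'gpt-4o'    (JSON.parse throws, the raw string is returned)
parseEnvVarValue(undefined);  // undefined
parseEnvVarValue('{"a": 1}'); // { a: 1 }
```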
|
||||
|
||||
const getEnvConfig = (envPath: string) => {
|
||||
dotenv.config({ path: envPath });
|
||||
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
|
||||
OCO_GEMINI_API_KEY: process.env.OCO_GEMINI_API_KEY,
|
||||
OCO_FLOWISE_API_KEY: process.env.OCO_FLOWISE_API_KEY,
|
||||
|
||||
OCO_TOKENS_MAX_INPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_INPUT),
|
||||
OCO_TOKENS_MAX_OUTPUT: parseEnvVarValue(process.env.OCO_TOKENS_MAX_OUTPUT),
|
||||
|
||||
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
|
||||
OCO_GEMINI_BASE_PATH: process.env.OCO_GEMINI_BASE_PATH,
|
||||
|
||||
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT,
|
||||
OCO_FLOWISE_ENDPOINT: process.env.OCO_FLOWISE_ENDPOINT,
|
||||
OCO_OLLAMA_API_URL: process.env.OCO_OLLAMA_API_URL,
|
||||
|
||||
OCO_DESCRIPTION: parseEnvVarValue(process.env.OCO_DESCRIPTION),
|
||||
OCO_EMOJI: parseEnvVarValue(process.env.OCO_EMOJI),
|
||||
OCO_LANGUAGE: process.env.OCO_LANGUAGE,
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
OCO_ONE_LINE_COMMIT: parseEnvVarValue(process.env.OCO_ONE_LINE_COMMIT),
|
||||
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
|
||||
|
||||
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
|
||||
};
|
||||
};
|
||||
|
||||
const getGlobalConfig = (configPath: string) => {
|
||||
let globalConfig: ConfigType;
|
||||
|
||||
const isGlobalConfigFileExist = existsSync(configPath);
|
||||
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
globalConfig = iniParse(configFile) as ConfigType;
|
||||
}
|
||||
|
||||
return globalConfig;
|
||||
};
|
||||
|
||||
/**
|
||||
* Merges two configs.
|
||||
* Env config takes precedence over global ~/.opencommit config file
|
||||
* @param main - env config
|
||||
* @param fallback - global ~/.opencommit config file
|
||||
* @returns merged config
|
||||
*/
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) =>
|
||||
Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
|
||||
return acc;
|
||||
}, {} as ConfigType);
|
||||
|
||||
interface GetConfigOptions {
  globalPath?: string;
  envPath?: string;
}

export const getConfig = ({
  envPath = defaultEnvPath,
  globalPath = defaultConfigPath
}: GetConfigOptions = {}): ConfigType => {
  const envConfig = getEnvConfig(envPath);
  const globalConfig = getGlobalConfig(globalPath);

  const config = mergeConfigs(envConfig, globalConfig);

  return config;
};
export const setConfig = (keyValues: [key: string, value: string][]) => {
  const config = getConfig() || {};
export const setConfig = (
  keyValues: [key: string, value: string][],
  globalConfigPath: string = defaultConfigPath
) => {
  const config = getConfig({
    globalPath: globalConfigPath
  });

  for (const [configKey, configValue] of keyValues) {
    if (!configValidators.hasOwnProperty(configKey)) {
      throw new Error(`Unsupported config key: ${configKey}`);
  for (let [key, value] of keyValues) {
    if (!configValidators.hasOwnProperty(key)) {
      const supportedKeys = Object.keys(configValidators).join('\n');
      throw new Error(
        `Unsupported config key: ${key}. Expected keys are:\n\n${supportedKeys}.\n\nFor more help refer to our docs: https://github.com/di-sukharev/opencommit`
      );
    }

    let parsedConfigValue;

    try {
      parsedConfigValue = JSON.parse(configValue);
      parsedConfigValue = JSON.parse(value);
    } catch (error) {
      parsedConfigValue = configValue;
      parsedConfigValue = value;
    }

    const validValue =
      configValidators[configKey as CONFIG_KEYS](parsedConfigValue);
    config[configKey as CONFIG_KEYS] = validValue;
    const validValue = configValidators[key as CONFIG_KEYS](
      parsedConfigValue,
      config
    );

    config[key] = validValue;
  }

  writeFileSync(configPath, iniStringify(config), 'utf8');
  writeFileSync(globalConfigPath, iniStringify(config), 'utf8');

  outro(`${chalk.green('✔')} Config successfully set`);
  outro(`${chalk.green('✔')} config successfully set`);
};
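// Illustrative sketch (hypothetical values, not part of this diff): the call
// that `oco config set KEY=VALUE` performs internally with the new signature;
// tests may pass a temporary path instead of the default ~/.opencommit.
setConfig([
  ['OCO_MODEL', 'gpt-4o-mini'],
  ['OCO_EMOJI', 'true']
]);
setConfig([['OCO_ONE_LINE_COMMIT', 'true']], '/tmp/opencommit-test-config');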
export const configCommand = command(
|
||||
@@ -243,9 +577,9 @@ export const configCommand = command(
|
||||
parameters: ['<mode>', '<key=values...>']
|
||||
},
|
||||
async (argv) => {
|
||||
intro('opencommit — config');
|
||||
try {
|
||||
const { mode, keyValues } = argv._;
|
||||
intro(`COMMAND: config ${mode} ${keyValues}`);
|
||||
|
||||
if (mode === CONFIG_MODES.get) {
|
||||
const config = getConfig() || {};
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
import { existsSync } from 'fs';
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import { COMMANDS } from '../CommandsEnum.js';
|
||||
import { assertGitRepo, getCoreHooksPath } from '../utils/git.js';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
|
||||
const HOOK_NAME = 'prepare-commit-msg';
|
||||
const DEFAULT_SYMLINK_URL = path.join('.git', 'hooks', HOOK_NAME);
|
||||
@@ -94,7 +92,7 @@ export const hookCommand = command(
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Unsupported mode: ${mode}. Supported modes are: 'set' or 'unset'`
|
||||
`Unsupported mode: ${mode}. Supported modes are: 'set' or 'unset'. Run: \`oco hook set\``
|
||||
);
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${error}`);
|
||||
|
||||
@@ -39,10 +39,15 @@ export const prepareCommitMessageHook = async (
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
if (!config?.OCO_OPENAI_API_KEY) {
|
||||
throw new Error(
|
||||
'No OPEN_AI_API exists. Set your OPEN_AI_API=<key> in ~/.opencommit'
|
||||
if (
|
||||
!config.OCO_OPENAI_API_KEY &&
|
||||
!config.OCO_ANTHROPIC_API_KEY &&
|
||||
!config.OCO_AZURE_API_KEY
|
||||
) {
|
||||
outro(
|
||||
'No OCO_OPENAI_API_KEY or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key via `oco config set <key>=<value>, e.g. `oco config set OCO_OPENAI_API_KEY=<value>`. For more info see https://github.com/di-sukharev/opencommit'
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const spin = spinner();
|
||||
|
||||
28
src/engine/Engine.ts
Normal file
@@ -0,0 +1,28 @@
import AnthropicClient from '@anthropic-ai/sdk';
import { OpenAIClient as AzureOpenAIClient } from '@azure/openai';
import { GoogleGenerativeAI as GeminiClient } from '@google/generative-ai';
import { AxiosInstance as RawAxiosClient } from 'axios';
import { OpenAI as OpenAIClient } from 'openai';

export interface AiEngineConfig {
  apiKey: string;
  model: string;
  maxTokensOutput: number;
  maxTokensInput: number;
  baseURL?: string;
}

type Client =
  | OpenAIClient
  | AzureOpenAIClient
  | AnthropicClient
  | RawAxiosClient
  | GeminiClient;

export interface AiEngine {
  config: AiEngineConfig;
  client: Client;
  generateCommitMessage(
    messages: Array<OpenAIClient.Chat.Completions.ChatCompletionMessageParam>
  ): Promise<string | null | undefined>;
}
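To make the contract concrete, here is a minimal hypothetical engine that satisfies the AiEngine interface above. It is a sketch for illustration only and is not part of this change set; the class name and canned reply are invented.

import { OpenAI } from 'openai';
import { AiEngine, AiEngineConfig } from './Engine';

// Hypothetical engine: returns a canned message instead of calling a provider.
class EchoEngine implements AiEngine {
  config: AiEngineConfig;
  client: any; // a real engine would keep its provider SDK client here

  constructor(config: AiEngineConfig) {
    this.config = config;
    this.client = null;
  }

  async generateCommitMessage(
    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
  ): Promise<string | null | undefined> {
    // Use the last message (the git diff) to produce a deterministic reply.
    const diff = messages[messages.length - 1]?.content as string;
    return `chore: received a diff of ${diff.length} characters`;
  }
}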
78
src/engine/anthropic.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
import AnthropicClient from '@anthropic-ai/sdk';
|
||||
import {
|
||||
MessageCreateParamsNonStreaming,
|
||||
MessageParam
|
||||
} from '@anthropic-ai/sdk/resources/messages.mjs';
|
||||
import { outro } from '@clack/prompts';
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface AnthropicConfig extends AiEngineConfig {}
|
||||
|
||||
export class AnthropicEngine implements AiEngine {
|
||||
config: AnthropicConfig;
|
||||
client: AnthropicClient;
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.client = new AnthropicClient({ apiKey: this.config.apiKey });
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> => {
|
||||
const systemMessage = messages.find((msg) => msg.role === 'system')
|
||||
?.content as string;
|
||||
const restMessages = messages.filter(
|
||||
(msg) => msg.role !== 'system'
|
||||
) as MessageParam[];
|
||||
|
||||
const params: MessageCreateParamsNonStreaming = {
|
||||
model: this.config.model,
|
||||
system: systemMessage,
|
||||
messages: restMessages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
};
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (
|
||||
REQUEST_TOKENS >
|
||||
this.config.maxTokensInput - this.config.maxTokensOutput
|
||||
) {
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
}
|
||||
|
||||
const data = await this.client.messages.create(params);
|
||||
|
||||
const message = data?.content[0].text;
|
||||
|
||||
return message;
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const anthropicAiError = error.response.data.error;
|
||||
|
||||
if (anthropicAiError?.message) outro(anthropicAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
77
src/engine/azure.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import {
|
||||
AzureKeyCredential,
|
||||
OpenAIClient as AzureOpenAIClient
|
||||
} from '@azure/openai';
|
||||
import { outro } from '@clack/prompts';
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface AzureAiEngineConfig extends AiEngineConfig {
|
||||
baseURL: string;
|
||||
apiKey: string;
|
||||
}
|
||||
|
||||
export class AzureEngine implements AiEngine {
|
||||
config: AzureAiEngineConfig;
|
||||
client: AzureOpenAIClient;
|
||||
|
||||
constructor(config: AzureAiEngineConfig) {
|
||||
this.config = config;
|
||||
this.client = new AzureOpenAIClient(
|
||||
this.config.baseURL,
|
||||
new AzureKeyCredential(this.config.apiKey)
|
||||
);
|
||||
}
|
||||
|
||||
generateCommitMessage = async (
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> => {
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (
|
||||
REQUEST_TOKENS >
|
||||
this.config.maxTokensInput - this.config.maxTokensOutput
|
||||
) {
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
}
|
||||
|
||||
const data = await this.client.getChatCompletions(
|
||||
this.config.model,
|
||||
messages
|
||||
);
|
||||
|
||||
const message = data.choices[0].message;
|
||||
|
||||
if (message?.content === null) {
|
||||
return undefined;
|
||||
}
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${this.config.model}`);
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${JSON.stringify(error)}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError?.message) outro(openAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
45
src/engine/flowise.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface FlowiseAiConfig extends AiEngineConfig {}
|
||||
|
||||
export class FlowiseAi implements AiEngine {
|
||||
config: FlowiseAiConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.client = axios.create({
|
||||
url: `${config.baseURL}/${config.apiKey}`,
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
const gitDiff = (messages[messages.length - 1]?.content as string)
|
||||
.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\n/g, '\\n')
|
||||
.replace(/\r/g, '\\r')
|
||||
.replace(/\t/g, '\\t');
|
||||
|
||||
const payload = {
|
||||
question: gitDiff,
|
||||
overrideConfig: {
|
||||
systemMessagePrompt: messages[0]?.content
|
||||
},
|
||||
history: messages.slice(1, -1)
|
||||
};
|
||||
try {
|
||||
const response = await this.client.post('', payload);
|
||||
const message = response.data;
|
||||
return message?.text;
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error('local model issues. details: ' + message);
|
||||
}
|
||||
}
|
||||
}
|
||||
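As a quick illustration of the escaping above (the input string is hypothetical), a diff containing quotes and newlines is flattened into a JSON-safe single line before being sent as the Flowise question:

const rawDiff = 'console.log("Hello");\n+console.log("World");';
const escaped = rawDiff
  .replace(/\\/g, '\\\\')
  .replace(/"/g, '\\"')
  .replace(/\n/g, '\\n')
  .replace(/\r/g, '\\r')
  .replace(/\t/g, '\\t');
// escaped now contains literal \" and \n sequences instead of raw quotes and newlines.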
88
src/engine/gemini.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import {
|
||||
Content,
|
||||
GoogleGenerativeAI,
|
||||
HarmBlockThreshold,
|
||||
HarmCategory,
|
||||
Part
|
||||
} from '@google/generative-ai';
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface GeminiConfig extends AiEngineConfig {}
|
||||
|
||||
export class Gemini implements AiEngine {
|
||||
config: GeminiConfig;
|
||||
client: GoogleGenerativeAI;
|
||||
|
||||
constructor(config) {
|
||||
this.client = new GoogleGenerativeAI(config.apiKey);
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
const systemInstruction = messages
|
||||
.filter((m) => m.role === 'system')
|
||||
.map((m) => m.content)
|
||||
.join('\n');
|
||||
|
||||
const gemini = this.client.getGenerativeModel({
|
||||
model: this.config.model,
|
||||
systemInstruction
|
||||
});
|
||||
|
||||
const contents = messages
|
||||
.filter((m) => m.role !== 'system')
|
||||
.map(
|
||||
(m) =>
|
||||
({
|
||||
parts: [{ text: m.content } as Part],
|
||||
role: m.role === 'user' ? m.role : 'model'
|
||||
} as Content)
|
||||
);
|
||||
|
||||
try {
|
||||
const result = await gemini.generateContent({
|
||||
contents,
|
||||
safetySettings: [
|
||||
{
|
||||
category: HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
|
||||
threshold: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE
|
||||
},
|
||||
{
|
||||
category: HarmCategory.HARM_CATEGORY_HARASSMENT,
|
||||
threshold: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE
|
||||
},
|
||||
{
|
||||
category: HarmCategory.HARM_CATEGORY_HATE_SPEECH,
|
||||
threshold: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE
|
||||
},
|
||||
{
|
||||
category: HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
|
||||
threshold: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE
|
||||
}
|
||||
],
|
||||
generationConfig: {
|
||||
maxOutputTokens: this.config.maxTokensOutput,
|
||||
temperature: 0,
|
||||
topP: 0.1
|
||||
}
|
||||
});
|
||||
|
||||
return result.response.text();
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const geminiError = error.response.data.error;
|
||||
if (geminiError) throw new Error(geminiError?.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
44
src/engine/ollama.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OllamaConfig extends AiEngineConfig {}
|
||||
|
||||
export class OllamaAi implements AiEngine {
|
||||
config: OllamaConfig;
|
||||
client: AxiosInstance;
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.client = axios.create({
|
||||
url: config.baseURL
|
||||
? `${config.baseURL}/${config.apiKey}`
|
||||
: 'http://localhost:11434/api/chat',
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
const params = {
|
||||
model: this.config.model ?? 'mistral',
|
||||
messages,
|
||||
options: { temperature: 0, top_p: 0.1 },
|
||||
stream: false
|
||||
};
|
||||
try {
|
||||
const response = await this.client.post(
|
||||
this.client.getUri(this.config),
|
||||
params
|
||||
);
|
||||
|
||||
const message = response.data.message;
|
||||
|
||||
return message?.content;
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error(`Ollama provider error: ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
59
src/engine/openAi.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OpenAiConfig extends AiEngineConfig {}
|
||||
|
||||
export class OpenAiEngine implements AiEngine {
|
||||
config: OpenAiConfig;
|
||||
client: OpenAI;
|
||||
|
||||
constructor(config: OpenAiConfig) {
|
||||
this.config = config;
|
||||
this.client = new OpenAI({ apiKey: config.apiKey });
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | null> => {
|
||||
const params = {
|
||||
model: this.config.model,
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
};
|
||||
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (
|
||||
REQUEST_TOKENS >
|
||||
this.config.maxTokensInput - this.config.maxTokensOutput
|
||||
)
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
|
||||
const completion = await this.client.chat.completions.create(params);
|
||||
|
||||
const message = completion.choices[0].message;
|
||||
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError) throw new Error(openAiError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
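A small hypothetical usage sketch of the engine above; the key, model name, and token limits are placeholders rather than values taken from this change.

const engine = new OpenAiEngine({
  apiKey: process.env.OCO_OPENAI_API_KEY ?? 'sk-placeholder',
  model: 'gpt-4o-mini',
  maxTokensInput: 4096,
  maxTokensOutput: 500
});

const commitMessage = await engine.generateCommitMessage([
  { role: 'system', content: 'You are to act as an author of a commit message in git.' },
  { role: 'user', content: 'diff --git a/index.ts b/index.ts ...' }
]);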
47
src/engine/testAi.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
import { AiEngine } from './Engine';
|
||||
|
||||
export const TEST_MOCK_TYPES = [
|
||||
'commit-message',
|
||||
'prompt-module-commitlint-config'
|
||||
] as const;
|
||||
|
||||
export type TestMockType = (typeof TEST_MOCK_TYPES)[number];
|
||||
|
||||
type TestAiEngine = Partial<AiEngine> & {
|
||||
mockType: TestMockType;
|
||||
};
|
||||
|
||||
export class TestAi implements TestAiEngine {
|
||||
mockType: TestMockType;
|
||||
|
||||
// those are not used in the test engine
|
||||
config: any;
|
||||
client: any;
|
||||
// ---
|
||||
|
||||
constructor(mockType: TestMockType) {
|
||||
this.mockType = mockType;
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
_messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
switch (this.mockType) {
|
||||
case 'commit-message':
|
||||
return 'fix(testAi.ts): test commit message';
|
||||
case 'prompt-module-commitlint-config':
|
||||
return (
|
||||
`{\n` +
|
||||
` "localLanguage": "english",\n` +
|
||||
` "commitFix": "fix(server): Change 'port' variable to uppercase 'PORT'",\n` +
|
||||
` "commitFeat": "feat(server): Allow server to listen on a port specified through environment variable",\n` +
|
||||
` "commitDescription": "Change 'port' variable to uppercase 'PORT'. Allow server to listen on a port specified through environment variable."\n` +
|
||||
`}`
|
||||
);
|
||||
default:
|
||||
throw Error('unsupported test mock type');
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,25 +1,27 @@
|
||||
import {
|
||||
ChatCompletionRequestMessage,
|
||||
ChatCompletionRequestMessageRoleEnum
|
||||
} from 'openai';
|
||||
|
||||
import { api } from './api';
|
||||
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
|
||||
import { OpenAI } from 'openai';
|
||||
import { DEFAULT_TOKEN_LIMITS, getConfig } from './commands/config';
|
||||
import { getMainCommitPrompt } from './prompts';
|
||||
import { getEngine } from './utils/engine';
|
||||
import { mergeDiffs } from './utils/mergeDiffs';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
const config = getConfig();
|
||||
const MAX_TOKENS_INPUT =
|
||||
config.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
const MAX_TOKENS_OUTPUT =
|
||||
config.OCO_TOKENS_MAX_OUTPUT ||
|
||||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
|
||||
const generateCommitMessageChatCompletionPrompt = async (
|
||||
diff: string
|
||||
): Promise<Array<ChatCompletionRequestMessage>> => {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
|
||||
diff: string,
|
||||
fullGitMojiSpec: boolean
|
||||
): Promise<Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>> => {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
|
||||
|
||||
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
|
||||
|
||||
chatContextAsCompletionRequest.push({
|
||||
role: ChatCompletionRequestMessageRoleEnum.User,
|
||||
role: 'user',
|
||||
content: diff
|
||||
});
|
||||
|
||||
@@ -29,45 +31,52 @@ const generateCommitMessageChatCompletionPrompt = async (
|
||||
export enum GenerateCommitMessageErrorEnum {
|
||||
tooMuchTokens = 'TOO_MUCH_TOKENS',
|
||||
internalError = 'INTERNAL_ERROR',
|
||||
emptyMessage = 'EMPTY_MESSAGE'
|
||||
emptyMessage = 'EMPTY_MESSAGE',
|
||||
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
|
||||
}
|
||||
|
||||
const ADJUSTMENT_FACTOR = 20;
|
||||
|
||||
export const generateCommitMessageByDiff = async (
|
||||
diff: string
|
||||
diff: string,
|
||||
fullGitMojiSpec: boolean = false
|
||||
): Promise<string> => {
|
||||
try {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
|
||||
|
||||
const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map(
|
||||
(msg) => tokenCount(msg.content) + 4
|
||||
(msg) => tokenCount(msg.content as string) + 4
|
||||
).reduce((a, b) => a + b, 0);
|
||||
|
||||
const MAX_REQUEST_TOKENS =
|
||||
DEFAULT_MODEL_TOKEN_LIMIT -
|
||||
MAX_TOKENS_INPUT -
|
||||
ADJUSTMENT_FACTOR -
|
||||
INIT_MESSAGES_PROMPT_LENGTH -
|
||||
config?.OCO_OPENAI_MAX_TOKENS;
|
||||
MAX_TOKENS_OUTPUT;
|
||||
|
||||
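// Illustrative budget (hypothetical numbers, not from this diff): with
// OCO_TOKENS_MAX_INPUT = 4096, ADJUSTMENT_FACTOR = 20,
// INIT_MESSAGES_PROMPT_LENGTH = 700 and OCO_TOKENS_MAX_OUTPUT = 500,
// MAX_REQUEST_TOKENS = 4096 - 20 - 700 - 500 = 2876, so any staged diff
// above ~2876 tokens is split per file before being sent.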
if (tokenCount(diff) >= MAX_REQUEST_TOKENS) {
|
||||
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs(
|
||||
diff,
|
||||
MAX_REQUEST_TOKENS
|
||||
MAX_REQUEST_TOKENS,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
const commitMessages = [];
|
||||
const commitMessages = [] as string[];
|
||||
for (const promise of commitMessagePromises) {
|
||||
commitMessages.push(await promise);
|
||||
commitMessages.push((await promise) as string);
|
||||
await delay(2000);
|
||||
}
|
||||
|
||||
return commitMessages.join('\n\n');
|
||||
}
|
||||
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(diff);
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
diff,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
const commitMessage = await api.generateCommitMessage(messages);
|
||||
const engine = getEngine();
|
||||
const commitMessage = await engine.generateCommitMessage(messages);
|
||||
|
||||
if (!commitMessage)
|
||||
throw new Error(GenerateCommitMessageErrorEnum.emptyMessage);
|
||||
@@ -81,7 +90,8 @@ export const generateCommitMessageByDiff = async (
|
||||
function getMessagesPromisesByChangesInFile(
|
||||
fileDiff: string,
|
||||
separator: string,
|
||||
maxChangeLength: number
|
||||
maxChangeLength: number,
|
||||
fullGitMojiSpec: boolean
|
||||
) {
|
||||
const hunkHeaderSeparator = '@@ ';
|
||||
const [fileHeader, ...fileDiffByLines] = fileDiff.split(hunkHeaderSeparator);
|
||||
@@ -92,7 +102,7 @@ function getMessagesPromisesByChangesInFile(
|
||||
maxChangeLength
|
||||
);
|
||||
|
||||
const lineDiffsWithHeader = [];
|
||||
const lineDiffsWithHeader = [] as string[];
|
||||
for (const change of mergedChanges) {
|
||||
const totalChange = fileHeader + change;
|
||||
if (tokenCount(totalChange) > maxChangeLength) {
|
||||
@@ -104,13 +114,15 @@ function getMessagesPromisesByChangesInFile(
|
||||
}
|
||||
}
|
||||
|
||||
const engine = getEngine();
|
||||
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
|
||||
async (lineDiff) => {
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
separator + lineDiff
|
||||
separator + lineDiff,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
return api.generateCommitMessage(messages);
|
||||
return engine.generateCommitMessage(messages);
|
||||
}
|
||||
);
|
||||
|
||||
@@ -119,9 +131,13 @@ function getMessagesPromisesByChangesInFile(
|
||||
|
||||
function splitDiff(diff: string, maxChangeLength: number) {
|
||||
const lines = diff.split('\n');
|
||||
const splitDiffs = [];
|
||||
const splitDiffs = [] as string[];
|
||||
let currentDiff = '';
|
||||
|
||||
if (maxChangeLength <= 0) {
|
||||
throw new Error(GenerateCommitMessageErrorEnum.outputTokensTooHigh);
|
||||
}
|
||||
|
||||
for (let line of lines) {
|
||||
// If a single line exceeds maxChangeLength, split it into multiple lines
|
||||
while (tokenCount(line) > maxChangeLength) {
|
||||
@@ -151,7 +167,8 @@ function splitDiff(diff: string, maxChangeLength: number) {
|
||||
|
||||
export const getCommitMsgsPromisesFromFileDiffs = async (
|
||||
diff: string,
|
||||
maxDiffLength: number
|
||||
maxDiffLength: number,
|
||||
fullGitMojiSpec: boolean
|
||||
) => {
|
||||
const separator = 'diff --git ';
|
||||
|
||||
@@ -160,7 +177,7 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
|
||||
// merge multiple files-diffs into 1 prompt to save tokens
|
||||
const mergedFilesDiffs = mergeDiffs(diffByFiles, maxDiffLength);
|
||||
|
||||
const commitMessagePromises = [];
|
||||
const commitMessagePromises = [] as Promise<string | null | undefined>[];
|
||||
|
||||
for (const fileDiff of mergedFilesDiffs) {
|
||||
if (tokenCount(fileDiff) >= maxDiffLength) {
|
||||
@@ -168,16 +185,19 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
|
||||
const messagesPromises = getMessagesPromisesByChangesInFile(
|
||||
fileDiff,
|
||||
separator,
|
||||
maxDiffLength
|
||||
maxDiffLength,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
commitMessagePromises.push(...messagesPromises);
|
||||
} else {
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
separator + fileDiff
|
||||
separator + fileDiff,
|
||||
fullGitMojiSpec
|
||||
);
|
||||
|
||||
commitMessagePromises.push(api.generateCommitMessage(messages));
|
||||
const engine = getEngine();
|
||||
commitMessagePromises.push(engine.generateCommitMessage(messages));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { unlinkSync, writeFileSync } from 'fs';
|
||||
|
||||
import core from '@actions/core';
|
||||
import exec from '@actions/exec';
|
||||
import github from '@actions/github';
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
import { PushEvent } from '@octokit/webhooks-types';
|
||||
|
||||
import { unlinkSync, writeFileSync } from 'fs';
|
||||
import { generateCommitMessageByDiff } from './generateCommitMessageFromGitDiff';
|
||||
import { randomIntFromInterval } from './utils/randomIntFromInterval';
|
||||
import { sleep } from './utils/sleep';
|
||||
@@ -54,7 +52,7 @@ async function improveMessagesInChunks(diffsAndSHAs: DiffAndSHA[]) {
|
||||
const chunkSize = diffsAndSHAs!.length % 2 === 0 ? 4 : 3;
|
||||
outro(`Improving commit messages in chunks of ${chunkSize}.`);
|
||||
const improvePromises = diffsAndSHAs!.map((commit) =>
|
||||
generateCommitMessageByDiff(commit.diff)
|
||||
generateCommitMessageByDiff(commit.diff, false)
|
||||
);
|
||||
|
||||
let improvedMessagesAndSHAs: MsgAndSHA[] = [];
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "localLanguage": "简体中文",
  "commitFix": "修复(server.ts):将端口变量从小写port改为大写PORT",
  "commitFeat": "功能(server.ts):添加对process.env.PORT环境变量的支持",
  "commitFix": "fix(server.ts):将端口变量从小写port改为大写PORT",
  "commitFeat": "feat(server.ts):添加对process.env.PORT环境变量的支持",
  "commitDescription": "现在端口变量被命名为PORT,这提高了命名约定的一致性,因为PORT是一个常量。环境变量的支持使应用程序更加灵活,因为它现在可以通过process.env.PORT环境变量在任何可用端口上运行。"
}
@@ -1,8 +1,8 @@
|
||||
import { spinner } from '@clack/prompts';
|
||||
|
||||
import { api } from '../../api';
|
||||
import { getConfig } from '../../commands/config';
|
||||
import { i18n, I18nLocals } from '../../i18n';
|
||||
import { getEngine } from '../../utils/engine';
|
||||
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
|
||||
import { computeHash } from './crypto';
|
||||
import { commitlintPrompts, inferPromptsFromCommitlintConfig } from './prompts';
|
||||
@@ -11,7 +11,7 @@ import { CommitlintLLMConfig } from './types';
|
||||
import * as utils from './utils';
|
||||
|
||||
const config = getConfig();
|
||||
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
const translation = i18n[(config.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
|
||||
export const configureCommitlintIntegration = async (force = false) => {
|
||||
const spin = spinner();
|
||||
@@ -19,7 +19,16 @@ export const configureCommitlintIntegration = async (force = false) => {
|
||||
|
||||
const fileExists = await utils.commitlintLLMConfigExists();
|
||||
|
||||
let commitLintConfig = await getCommitLintPWDConfig();
|
||||
const commitLintConfig = await getCommitLintPWDConfig();
|
||||
if (commitLintConfig === null) {
|
||||
throw new Error(
|
||||
`Failed to load @commitlint config. Please check the following:
|
||||
* @commitlint >= 9.0.0 is installed in the local directory.
|
||||
* 'node_modules/@commitlint/load' package exists.
|
||||
* A valid @commitlint configuration exists.
|
||||
`
|
||||
);
|
||||
}
|
||||
|
||||
// debug complete @commitlint configuration
|
||||
// await fs.writeFile(
|
||||
@@ -55,11 +64,16 @@ export const configureCommitlintIntegration = async (force = false) => {
|
||||
// consistencyPrompts.map((p) => p.content)
|
||||
// );
|
||||
|
||||
const engine = getEngine();
|
||||
let consistency =
|
||||
(await api.generateCommitMessage(consistencyPrompts)) || '{}';
|
||||
(await engine.generateCommitMessage(consistencyPrompts)) || '{}';
|
||||
|
||||
// Cleanup the consistency answer. Sometimes 'gpt-3.5-turbo' sends rule's back.
|
||||
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));
|
||||
|
||||
// sometimes consistency is preceded by explanatory text like "Here is your JSON:"
|
||||
consistency = utils.getJSONBlock(consistency);
|
||||
|
||||
// ... remaining might be extra set of "\n"
|
||||
consistency = utils.removeDoubleNewlines(consistency);
|
||||
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import chalk from 'chalk';
|
||||
import {
|
||||
ChatCompletionRequestMessage,
|
||||
ChatCompletionRequestMessageRoleEnum
|
||||
} from 'openai';
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
import { outro } from '@clack/prompts';
|
||||
import {
|
||||
@@ -17,7 +14,7 @@ import { i18n, I18nLocals } from '../../i18n';
|
||||
import { IDENTITY, INIT_DIFF_PROMPT } from '../../prompts';
|
||||
|
||||
const config = getConfig();
|
||||
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
const translation = i18n[(config.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
|
||||
type DeepPartial<T> = {
|
||||
[P in keyof T]?: {
|
||||
@@ -214,11 +211,10 @@ const STRUCTURE_OF_COMMIT = `
|
||||
// Prompt to generate LLM-readable rules based on @commitlint rules.
|
||||
const GEN_COMMITLINT_CONSISTENCY_PROMPT = (
|
||||
prompts: string[]
|
||||
): ChatCompletionRequestMessage[] => [
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam[] => [
|
||||
{
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
// prettier-ignore
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
|
||||
role: 'system',
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
|
||||
|
||||
Here are the specific requirements and conventions that should be strictly followed:
|
||||
|
||||
@@ -260,21 +256,33 @@ Example Git Diff is to follow:`
|
||||
const INIT_MAIN_PROMPT = (
|
||||
language: string,
|
||||
prompts: string[]
|
||||
): ChatCompletionRequestMessage => ({
|
||||
role: ChatCompletionRequestMessageRoleEnum.System,
|
||||
// prettier-ignore
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.' : 'Do not preface the commit with anything.'}
|
||||
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.' : "Don't add any descriptions to the commit, only commit message."}
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
|
||||
role: 'system',
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${
|
||||
config.OCO_WHY ? 'and WHY the changes were done' : ''
|
||||
}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${
|
||||
config.OCO_EMOJI
|
||||
? 'Use GitMoji convention to preface the commit.'
|
||||
: 'Do not preface the commit with anything.'
|
||||
}
|
||||
${
|
||||
config.OCO_DESCRIPTION
|
||||
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
|
||||
: "Don't add any descriptions to the commit, only commit message."
|
||||
}
|
||||
Use the present tense. Use ${language} to answer.
|
||||
${
|
||||
config.OCO_ONE_LINE_COMMIT
|
||||
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
|
||||
: ''
|
||||
}
|
||||
|
||||
You will strictly follow the following conventions to generate the content of the commit message:
|
||||
- ${prompts.join('\n- ')}
|
||||
|
||||
The conventions refers to the following structure of commit message:
|
||||
${STRUCTURE_OF_COMMIT}
|
||||
|
||||
`
|
||||
${STRUCTURE_OF_COMMIT}`
|
||||
});
|
||||
|
||||
export const commitlintPrompts = {
|
||||
|
||||
@@ -1,11 +1,41 @@
|
||||
import fs from 'fs/promises';
|
||||
import path from 'path';
|
||||
|
||||
const nodeModulesPath = path.join(
|
||||
process.env.PWD as string,
|
||||
'node_modules',
|
||||
'@commitlint',
|
||||
'load'
|
||||
);
|
||||
const findModulePath = (moduleName: string) => {
|
||||
const searchPaths = [
|
||||
path.join('node_modules', moduleName),
|
||||
path.join('node_modules', '.pnpm')
|
||||
];
|
||||
|
||||
for (const basePath of searchPaths) {
|
||||
try {
|
||||
const resolvedPath = require.resolve(moduleName, { paths: [basePath] });
|
||||
return resolvedPath;
|
||||
} catch {
|
||||
// Continue to the next search path if the module is not found
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Cannot find module ${moduleName}`);
|
||||
};
|
||||
|
||||
const getCommitLintModuleType = async (): Promise<'cjs' | 'esm'> => {
|
||||
const packageFile = '@commitlint/load/package.json';
|
||||
const packageJsonPath = findModulePath(packageFile);
|
||||
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
|
||||
|
||||
if (!packageJson) {
|
||||
throw new Error(`Failed to parse ${packageFile}`);
|
||||
}
|
||||
|
||||
return packageJson.type === 'module' ? 'esm' : 'cjs';
|
||||
};
|
||||
|
||||
/**
|
||||
* QualifiedConfig from any version of @commitlint/types
|
||||
* @see https://github.com/conventional-changelog/commitlint/blob/master/@commitlint/types/src/load.ts
|
||||
*/
|
||||
type QualifiedConfigOnAnyVersion = { [key: string]: unknown };
|
||||
|
||||
/**
|
||||
* This code is loading the configuration for the `@commitlint` package from the current working
|
||||
@@ -13,13 +43,31 @@ const nodeModulesPath = path.join(
|
||||
*
|
||||
* @returns
|
||||
*/
|
||||
export const getCommitLintPWDConfig = async () => {
|
||||
const load = require(nodeModulesPath).default;
|
||||
export const getCommitLintPWDConfig =
|
||||
async (): Promise<QualifiedConfigOnAnyVersion | null> => {
|
||||
let load: Function, modulePath: string;
|
||||
switch (await getCommitLintModuleType()) {
|
||||
case 'cjs':
|
||||
/**
|
||||
* CommonJS (<= commitlint@v18.x.x.)
|
||||
*/
|
||||
modulePath = findModulePath('@commitlint/load');
|
||||
load = require(modulePath).default;
|
||||
break;
|
||||
case 'esm':
|
||||
/**
|
||||
* ES Module (commitlint@v19.x.x. <= )
|
||||
* Directory import is not supported in ES Module resolution, so import the file directly
|
||||
*/
|
||||
modulePath = await findModulePath('@commitlint/load/lib/load.js');
|
||||
load = (await import(modulePath)).default;
|
||||
break;
|
||||
}
|
||||
|
||||
if (load && typeof load === 'function') {
|
||||
return await load();
|
||||
}
|
||||
if (load && typeof load === 'function') {
|
||||
return await load();
|
||||
}
|
||||
|
||||
// @commitlint/load is not a function
|
||||
return null;
|
||||
};
|
||||
// @commitlint/load is not a function
|
||||
return null;
|
||||
};
|
||||
|
||||
@@ -16,6 +16,16 @@ export const removeDoubleNewlines = (input: string): string => {
  return input;
};

export const getJSONBlock = (input: string): string => {
  const jsonIndex = input.search('```json');
  if (jsonIndex > -1) {
    input = input.slice(jsonIndex + 8);
    const endJsonIndex = input.search('```');
    input = input.slice(0, endJsonIndex);
  }
  return input;
};
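// Illustrative sketch (hypothetical model reply, not part of this diff):
// getJSONBlock keeps only the body of a ```json fenced block and drops any
// surrounding chatter.
const reply = 'Here is your JSON:\n```json\n{ "localLanguage": "english" }\n```\nThanks!';
const json = getJSONBlock(reply);
// json === '{ "localLanguage": "english" }\n' (trailing newline included)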
export const commitlintLLMConfigExists = async (): Promise<boolean> => {
  let exists;
  try {
177
src/prompts.ts
@@ -1,35 +1,141 @@
|
||||
import {
|
||||
ChatCompletionRequestMessage,
|
||||
ChatCompletionRequestMessageRoleEnum
|
||||
} from 'openai';
|
||||
|
||||
import { note } from '@clack/prompts';
|
||||
|
||||
import { OpenAI } from 'openai';
|
||||
import { getConfig } from './commands/config';
|
||||
import { i18n, I18nLocals } from './i18n';
|
||||
import { configureCommitlintIntegration } from './modules/commitlint/config';
|
||||
import { commitlintPrompts } from './modules/commitlint/prompts';
|
||||
import { ConsistencyPrompt } from './modules/commitlint/types';
|
||||
import * as utils from './modules/commitlint/utils';
|
||||
import { removeConventionalCommitWord } from './utils/removeConventionalCommitWord';
|
||||
|
||||
const config = getConfig();
|
||||
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
const translation = i18n[(config.OCO_LANGUAGE as I18nLocals) || 'en'];
|
||||
|
||||
export const IDENTITY =
|
||||
'You are to act as the author of a commit message in git.';
|
||||
'You are to act as an author of a commit message in git.';
|
||||
|
||||
const INIT_MAIN_PROMPT = (language: string): ChatCompletionRequestMessage => ({
|
||||
role: ChatCompletionRequestMessageRoleEnum.System,
|
||||
// prettier-ignore
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the conventional commit convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.' : 'Do not preface the commit with anything.'}
|
||||
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.' : "Don't add any descriptions to the commit, only commit message."}
|
||||
Use the present tense. Lines must not be longer than 74 characters. Use ${language} to answer.`
|
||||
const GITMOJI_HELP = `Use GitMoji convention to preface the commit. Here are some help to choose the right emoji (emoji, description):
|
||||
🐛, Fix a bug;
|
||||
✨, Introduce new features;
|
||||
📝, Add or update documentation;
|
||||
🚀, Deploy stuff;
|
||||
✅, Add, update, or pass tests;
|
||||
♻️, Refactor code;
|
||||
⬆️, Upgrade dependencies;
|
||||
🔧, Add or update configuration files;
|
||||
🌐, Internationalization and localization;
|
||||
💡, Add or update comments in source code;`;
|
||||
|
||||
const FULL_GITMOJI_SPEC = `${GITMOJI_HELP}
|
||||
🎨, Improve structure / format of the code;
|
||||
⚡️, Improve performance;
|
||||
🔥, Remove code or files;
|
||||
🚑️, Critical hotfix;
|
||||
💄, Add or update the UI and style files;
|
||||
🎉, Begin a project;
|
||||
🔒️, Fix security issues;
|
||||
🔐, Add or update secrets;
|
||||
🔖, Release / Version tags;
|
||||
🚨, Fix compiler / linter warnings;
|
||||
🚧, Work in progress;
|
||||
💚, Fix CI Build;
|
||||
⬇️, Downgrade dependencies;
|
||||
📌, Pin dependencies to specific versions;
|
||||
👷, Add or update CI build system;
|
||||
📈, Add or update analytics or track code;
|
||||
➕, Add a dependency;
|
||||
➖, Remove a dependency;
|
||||
🔨, Add or update development scripts;
|
||||
✏️, Fix typos;
|
||||
💩, Write bad code that needs to be improved;
|
||||
⏪️, Revert changes;
|
||||
🔀, Merge branches;
|
||||
📦️, Add or update compiled files or packages;
|
||||
👽️, Update code due to external API changes;
|
||||
🚚, Move or rename resources (e.g.: files, paths, routes);
|
||||
📄, Add or update license;
|
||||
💥, Introduce breaking changes;
|
||||
🍱, Add or update assets;
|
||||
♿️, Improve accessibility;
|
||||
🍻, Write code drunkenly;
|
||||
💬, Add or update text and literals;
|
||||
🗃️, Perform database related changes;
|
||||
🔊, Add or update logs;
|
||||
🔇, Remove logs;
|
||||
👥, Add or update contributor(s);
|
||||
🚸, Improve user experience / usability;
|
||||
🏗️, Make architectural changes;
|
||||
📱, Work on responsive design;
|
||||
🤡, Mock things;
|
||||
🥚, Add or update an easter egg;
|
||||
🙈, Add or update a .gitignore file;
|
||||
📸, Add or update snapshots;
|
||||
⚗️, Perform experiments;
|
||||
🔍️, Improve SEO;
|
||||
🏷️, Add or update types;
|
||||
🌱, Add or update seed files;
|
||||
🚩, Add, update, or remove feature flags;
|
||||
🥅, Catch errors;
|
||||
💫, Add or update animations and transitions;
|
||||
🗑️, Deprecate code that needs to be cleaned up;
|
||||
🛂, Work on code related to authorization, roles and permissions;
|
||||
🩹, Simple fix for a non-critical issue;
|
||||
🧐, Data exploration/inspection;
|
||||
⚰️, Remove dead code;
|
||||
🧪, Add a failing test;
|
||||
👔, Add or update business logic;
|
||||
🩺, Add or update healthcheck;
|
||||
🧱, Infrastructure related changes;
|
||||
🧑💻, Improve developer experience;
|
||||
💸, Add sponsorships or money related infrastructure;
|
||||
🧵, Add or update code related to multithreading or concurrency;
|
||||
🦺, Add or update code related to validation.`;
|
||||
|
||||
const CONVENTIONAL_COMMIT_KEYWORDS =
|
||||
'Do not preface the commit with anything, except for the conventional commit keywords: fix, feat, build, chore, ci, docs, style, refactor, perf, test.';
|
||||
|
||||
const getCommitConvention = (fullGitMojiSpec: boolean) =>
|
||||
config.OCO_EMOJI
|
||||
? fullGitMojiSpec
|
||||
? FULL_GITMOJI_SPEC
|
||||
: GITMOJI_HELP
|
||||
: CONVENTIONAL_COMMIT_KEYWORDS;
|
||||
|
||||
const getDescriptionInstruction = () =>
|
||||
config.OCO_DESCRIPTION
|
||||
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
|
||||
: "Don't add any descriptions to the commit, only commit message.";
|
||||
|
||||
const getOneLineCommitInstruction = () =>
|
||||
config.OCO_ONE_LINE_COMMIT
|
||||
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
|
||||
: '';
|
||||
|
||||
const INIT_MAIN_PROMPT = (
|
||||
language: string,
|
||||
fullGitMojiSpec: boolean
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
|
||||
role: 'system',
|
||||
content: (() => {
|
||||
const commitConvention = fullGitMojiSpec
|
||||
? 'GitMoji specification'
|
||||
: 'Conventional Commit Convention';
|
||||
const missionStatement = `${IDENTITY} Your mission is to create clean and comprehensive commit messages as per the ${commitConvention} and explain WHAT were the changes and mainly WHY the changes were done.`;
|
||||
const diffInstruction =
|
||||
"I'll send you an output of 'git diff --staged' command, and you are to convert it into a commit message.";
|
||||
const conventionGuidelines = getCommitConvention(fullGitMojiSpec);
|
||||
const descriptionGuideline = getDescriptionInstruction();
|
||||
const oneLineCommitGuideline = getOneLineCommitInstruction();
|
||||
const generalGuidelines = `Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`;
|
||||
|
||||
return `${missionStatement}\n${diffInstruction}\n${conventionGuidelines}\n${descriptionGuideline}\n${oneLineCommitGuideline}\n${generalGuidelines}`;
|
||||
})()
|
||||
});
|
||||
|
||||
export const INIT_DIFF_PROMPT: ChatCompletionRequestMessage = {
|
||||
role: ChatCompletionRequestMessageRoleEnum.User,
|
||||
content: `diff --git a/src/server.ts b/src/server.ts
|
||||
export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessageParam =
|
||||
{
|
||||
role: 'user',
|
||||
content: `diff --git a/src/server.ts b/src/server.ts
|
||||
index ad4db42..f3b18a9 100644
|
||||
--- a/src/server.ts
|
||||
+++ b/src/server.ts
|
||||
@@ -53,21 +159,35 @@ export const INIT_DIFF_PROMPT: ChatCompletionRequestMessage = {
|
||||
+app.listen(process.env.PORT || PORT, () => {
|
||||
+ console.log(\`Server listening on port \${PORT}\`);
|
||||
});`
|
||||
};
|
||||
|
||||
const getContent = (translation: ConsistencyPrompt) => {
|
||||
const fix = config.OCO_EMOJI
|
||||
? `🐛 ${removeConventionalCommitWord(translation.commitFix)}`
|
||||
: translation.commitFix;
|
||||
|
||||
const feat = config.OCO_EMOJI
|
||||
? `✨ ${removeConventionalCommitWord(translation.commitFeat)}`
|
||||
: translation.commitFeat;
|
||||
|
||||
const description = config.OCO_DESCRIPTION
|
||||
? translation.commitDescription
|
||||
: '';
|
||||
|
||||
return `${fix}\n${feat}\n${description}`;
|
||||
};
|
||||
|
||||
const INIT_CONSISTENCY_PROMPT = (
|
||||
translation: ConsistencyPrompt
|
||||
): ChatCompletionRequestMessage => ({
|
||||
role: ChatCompletionRequestMessageRoleEnum.Assistant,
|
||||
content: `${config?.OCO_EMOJI ? '🐛 ' : ''}${translation.commitFix}
|
||||
${config?.OCO_EMOJI ? '✨ ' : ''}${translation.commitFeat}
|
||||
${config?.OCO_DESCRIPTION ? translation.commitDescription : ''}`
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
|
||||
role: 'assistant',
|
||||
content: getContent(translation)
|
||||
});
|
||||
|
||||
export const getMainCommitPrompt = async (): Promise<
|
||||
ChatCompletionRequestMessage[]
|
||||
> => {
|
||||
switch (config?.OCO_PROMPT_MODULE) {
|
||||
export const getMainCommitPrompt = async (
|
||||
fullGitMojiSpec: boolean
|
||||
): Promise<Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>> => {
|
||||
switch (config.OCO_PROMPT_MODULE) {
|
||||
case '@commitlint':
|
||||
if (!(await utils.commitlintLLMConfigExists())) {
|
||||
note(
|
||||
@@ -93,9 +213,8 @@ export const getMainCommitPrompt = async (): Promise<
|
||||
];
|
||||
|
||||
default:
|
||||
// conventional-commit
|
||||
return [
|
||||
INIT_MAIN_PROMPT(translation.localLanguage),
|
||||
INIT_MAIN_PROMPT(translation.localLanguage, fullGitMojiSpec),
|
||||
INIT_DIFF_PROMPT,
|
||||
INIT_CONSISTENCY_PROMPT(translation)
|
||||
];
|
||||
|
||||
@@ -3,7 +3,7 @@ import chalk from 'chalk';
|
||||
import { outro } from '@clack/prompts';
|
||||
|
||||
import currentPackage from '../../package.json';
|
||||
import { getOpenCommitLatestVersion } from '../api';
|
||||
import { getOpenCommitLatestVersion } from '../version';
|
||||
|
||||
export const checkIsLatestVersion = async () => {
|
||||
const latestVersion = await getOpenCommitLatestVersion();
|
||||
|
||||
65
src/utils/engine.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { getConfig, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
import { AnthropicEngine } from '../engine/anthropic';
|
||||
import { AzureEngine } from '../engine/azure';
|
||||
import { AiEngine } from '../engine/Engine';
|
||||
import { FlowiseAi } from '../engine/flowise';
|
||||
import { Gemini } from '../engine/gemini';
|
||||
import { OllamaAi } from '../engine/ollama';
|
||||
import { OpenAiEngine } from '../engine/openAi';
|
||||
import { TestAi, TestMockType } from '../engine/testAi';
|
||||
|
||||
export function getEngine(): AiEngine {
|
||||
const config = getConfig();
|
||||
const provider = config.OCO_AI_PROVIDER;
|
||||
|
||||
const DEFAULT_CONFIG = {
|
||||
model: config.OCO_MODEL!,
|
||||
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
|
||||
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
|
||||
baseURL: config.OCO_OPENAI_BASE_PATH!
|
||||
};
|
||||
|
||||
switch (provider) {
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
return new OllamaAi({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: '',
|
||||
baseURL: config.OCO_OLLAMA_API_URL!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return new AnthropicEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_ANTHROPIC_API_KEY!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.TEST:
|
||||
return new TestAi(config.OCO_TEST_MOCK_TYPE as TestMockType);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
return new Gemini({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_GEMINI_API_KEY!,
|
||||
baseURL: config.OCO_GEMINI_BASE_PATH!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AZURE:
|
||||
return new AzureEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_AZURE_API_KEY!
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.FLOWISE:
|
||||
return new FlowiseAi({
|
||||
...DEFAULT_CONFIG,
|
||||
baseURL: config.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
|
||||
apiKey: config.OCO_FLOWISE_API_KEY!
|
||||
});
|
||||
|
||||
default:
|
||||
return new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config.OCO_OPENAI_API_KEY!
|
||||
});
|
||||
}
|
||||
}
|
||||
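A hypothetical usage sketch of the provider switch above; the provider value and diff content are placeholders.

// With OCO_AI_PROVIDER=ollama set via `oco config set` or the environment,
// getEngine() returns an OllamaAi instance pointed at OCO_OLLAMA_API_URL;
// with no provider configured it falls back to OpenAiEngine.
const engine = getEngine();
const message = await engine.generateCommitMessage([
  { role: 'user', content: 'diff --git a/README.md b/README.md ...' }
]);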
@@ -75,8 +75,11 @@ export const getChangedFiles = async (): Promise<string[]> => {
|
||||
|
||||
export const gitAdd = async ({ files }: { files: string[] }) => {
|
||||
const gitAddSpinner = spinner();
|
||||
|
||||
gitAddSpinner.start('Adding files to commit');
|
||||
|
||||
await execa('git', ['add', ...files]);
|
||||
|
||||
gitAddSpinner.stop('Done');
|
||||
};
|
||||
|
||||
|
||||
3
src/utils/removeConventionalCommitWord.ts
Normal file
@@ -0,0 +1,3 @@
export function removeConventionalCommitWord(message: string): string {
  return message.replace(/^(fix|feat)\((.+?)\):/, '($2):');
}
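For illustration, this is what the helper does when GitMoji mode replaces the conventional keyword with an emoji; the example strings are hypothetical.

removeConventionalCommitWord('fix(server.ts): use uppercase PORT');
// -> '(server.ts): use uppercase PORT'
removeConventionalCommitWord('docs(readme): add setup section');
// -> unchanged: only a leading fix/feat prefix is rewritten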
14
src/version.ts
Normal file
@@ -0,0 +1,14 @@
import { outro } from '@clack/prompts';
import { execa } from 'execa';

export const getOpenCommitLatestVersion = async (): Promise<
  string | undefined
> => {
  try {
    const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
    return stdout;
  } catch (_) {
    outro('Error while getting the latest version of opencommit');
    return undefined;
  }
};
22
test/Dockerfile
Normal file
@@ -0,0 +1,22 @@
|
||||
FROM ubuntu:latest
|
||||
|
||||
RUN apt-get update && apt-get install -y curl git
|
||||
|
||||
# Install Node.js v20
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
|
||||
RUN apt-get install -y nodejs
|
||||
|
||||
# Setup git
|
||||
RUN git config --global user.email "test@example.com"
|
||||
RUN git config --global user.name "Test User"
|
||||
|
||||
WORKDIR /app
|
||||
COPY package.json /app/
|
||||
COPY package-lock.json /app/
|
||||
|
||||
RUN ls -la
|
||||
|
||||
RUN npm ci
|
||||
COPY . /app
|
||||
RUN ls -la
|
||||
RUN npm run build
|
||||
12
test/e2e/noChanges.test.ts
Normal file
@@ -0,0 +1,12 @@
import { resolve } from 'path'
import { render } from 'cli-testing-library'
import 'cli-testing-library/extend-expect';
import { prepareEnvironment } from './utils';

it('cli flow when there are no changes', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const { findByText } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
  expect(await findByText('No changes detected')).toBeInTheConsole();

  await cleanup();
});
55
test/e2e/oneFile.test.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import { resolve } from 'path'
|
||||
import { render } from 'cli-testing-library'
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment } from './utils';
|
||||
|
||||
it('cli flow to generate commit message for 1 new file (staged)', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
|
||||
await render('git' ,['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
expect(await queryByText('No files are staged')).not.toBeInTheConsole();
|
||||
expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Generating the commit message')).toBeInTheConsole();
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
|
||||
it('cli flow to generate commit message for 1 changed file (not staged)', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
|
||||
await render('git' ,['add index.ts'], { cwd: gitDir });
|
||||
await render('git' ,[`commit -m 'add new file'`], { cwd: gitDir });
|
||||
|
||||
await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
|
||||
expect(await findByText('No files are staged')).toBeInTheConsole();
|
||||
expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Generating the commit message')).toBeInTheConsole();
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully committed')).toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
222 test/e2e/prompt-module/commitlint.test.ts Normal file
@@ -0,0 +1,222 @@
import { resolve } from 'path';
import { render } from 'cli-testing-library';
import 'cli-testing-library/extend-expect';
import { prepareEnvironment, wait } from '../utils';
import path from 'path';

function getAbsolutePath(relativePath: string) {
  const scriptDir = path.dirname(__filename);
  return path.resolve(scriptDir, relativePath);
}
async function setupCommitlint(dir: string, ver: 9 | 18 | 19) {
  let packagePath, packageJsonPath, configPath;
  switch (ver) {
    case 9:
      packagePath = getAbsolutePath('./data/commitlint_9/node_modules');
      packageJsonPath = getAbsolutePath('./data/commitlint_9/package.json');
      configPath = getAbsolutePath('./data/commitlint_9/commitlint.config.js');
      break;
    case 18:
      packagePath = getAbsolutePath('./data/commitlint_18/node_modules');
      packageJsonPath = getAbsolutePath('./data/commitlint_18/package.json');
      configPath = getAbsolutePath('./data/commitlint_18/commitlint.config.js');
      break;
    case 19:
      packagePath = getAbsolutePath('./data/commitlint_19/node_modules');
      packageJsonPath = getAbsolutePath('./data/commitlint_19/package.json');
      configPath = getAbsolutePath('./data/commitlint_19/commitlint.config.js');
      break;
  }
  await render('cp', ['-r', packagePath, '.'], { cwd: dir });
  await render('cp', [packageJsonPath, '.'], { cwd: dir });
  await render('cp', [configPath, '.'], { cwd: dir });
  await wait(3000); // Avoid flakiness by waiting
}

describe('cli flow to run "oco commitlint force"', () => {
  it('on commitlint@9 using CJS', async () => {
    const { gitDir, cleanup } = await prepareEnvironment();

    await setupCommitlint(gitDir, 9);
    const npmList = await render('npm', ['list', '@commitlint/load'], {
      cwd: gitDir
    });
    expect(await npmList.findByText('@commitlint/load@9')).toBeInTheConsole();

    const { findByText } = await render(
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
      { cwd: gitDir }
    );

    expect(
      await findByText('opencommit — configure @commitlint')
    ).toBeInTheConsole();
    expect(
      await findByText('Read @commitlint configuration')
    ).toBeInTheConsole();

    expect(
      await findByText('Generating consistency with given @commitlint rules')
    ).toBeInTheConsole();
    expect(
      await findByText('Done - please review contents of')
    ).toBeInTheConsole();

    await cleanup();
  });
  it('on commitlint@18 using CJS', async () => {
    const { gitDir, cleanup } = await prepareEnvironment();

    await setupCommitlint(gitDir, 18);
    const npmList = await render('npm', ['list', '@commitlint/load'], {
      cwd: gitDir
    });
    expect(await npmList.findByText('@commitlint/load@18')).toBeInTheConsole();

    const { findByText } = await render(
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
      { cwd: gitDir }
    );

    expect(
      await findByText('opencommit — configure @commitlint')
    ).toBeInTheConsole();
    expect(
      await findByText('Read @commitlint configuration')
    ).toBeInTheConsole();

    expect(
      await findByText('Generating consistency with given @commitlint rules')
    ).toBeInTheConsole();
    expect(
      await findByText('Done - please review contents of')
    ).toBeInTheConsole();

    await cleanup();
  });
  it('on commitlint@19 using ESM', async () => {
    const { gitDir, cleanup } = await prepareEnvironment();

    await setupCommitlint(gitDir, 19);
    const npmList = await render('npm', ['list', '@commitlint/load'], {
      cwd: gitDir
    });
    expect(await npmList.findByText('@commitlint/load@19')).toBeInTheConsole();

    const { findByText } = await render(
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
      { cwd: gitDir }
    );

    expect(
      await findByText('opencommit — configure @commitlint')
    ).toBeInTheConsole();
    expect(
      await findByText('Read @commitlint configuration')
    ).toBeInTheConsole();

    expect(
      await findByText('Generating consistency with given @commitlint rules')
    ).toBeInTheConsole();
    expect(
      await findByText('Done - please review contents of')
    ).toBeInTheConsole();

    await cleanup();
  });
});

describe('cli flow to generate commit message using @commitlint prompt-module', () => {
  it('on commitlint@19 using ESM', async () => {
    const { gitDir, cleanup } = await prepareEnvironment();

    // Setup commitlint@19
    await setupCommitlint(gitDir, 19);
    const npmList = await render('npm', ['list', '@commitlint/load'], {
      cwd: gitDir
    });
    expect(await npmList.findByText('@commitlint/load@19')).toBeInTheConsole();

    // Run `oco commitlint force`
    const commitlintForce = await render(
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
      { cwd: gitDir }
    );
    expect(
      await commitlintForce.findByText('Done - please review contents of')
    ).toBeInTheConsole();

    // Run `oco commitlint get`
    const commitlintGet = await render(
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} commitlint get \
      `,
      [],
      { cwd: gitDir }
    );
    expect(await commitlintGet.findByText('consistency')).toBeInTheConsole();

    // Run 'oco' using .opencommit-commitlint
    await render('echo', [`'console.log("Hello World");' > index.ts`], {
      cwd: gitDir
    });
    await render('git', ['add index.ts'], { cwd: gitDir });

    const oco = await render(
      `
      OCO_TEST_MOCK_TYPE='commit-message' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      node ${resolve('./out/cli.cjs')} \
      `,
      [],
      { cwd: gitDir }
    );

    expect(
      await oco.findByText('Generating the commit message')
    ).toBeInTheConsole();
    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(
      await oco.findByText('Choose a remote to push to')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(
      await oco.findByText('Successfully pushed all commits to origin')
    ).toBeInTheConsole();

    await cleanup();
  });
});
@@ -0,0 +1,3 @@
module.exports = {
  extends: ['@commitlint/config-conventional']
};
2029 test/e2e/prompt-module/data/commitlint_18/package-lock.json (generated) Normal file
File diff suppressed because it is too large
15 test/e2e/prompt-module/data/commitlint_18/package.json Normal file
@@ -0,0 +1,15 @@
{
  "name": "commitlint-test",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@commitlint/cli": "^18.0.0",
    "@commitlint/config-conventional": "^18.0.0"
  }
}
@@ -0,0 +1,3 @@
export default {
  extends: ['@commitlint/config-conventional']
};
1453 test/e2e/prompt-module/data/commitlint_19/package-lock.json (generated) Normal file
File diff suppressed because it is too large
16 test/e2e/prompt-module/data/commitlint_19/package.json Normal file
@@ -0,0 +1,16 @@
{
  "name": "commitlint-test",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "type": "module",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@commitlint/cli": "^19.0.0",
    "@commitlint/config-conventional": "^19.0.0"
  }
}
@@ -0,0 +1,3 @@
module.exports = {
  extends: ['@commitlint/config-conventional']
};
1671 test/e2e/prompt-module/data/commitlint_9/package-lock.json (generated) Normal file
File diff suppressed because it is too large
15 test/e2e/prompt-module/data/commitlint_9/package.json Normal file
@@ -0,0 +1,15 @@
{
  "name": "commitlint-test",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@commitlint/cli": "^9.0.0",
    "@commitlint/config-conventional": "^9.0.0"
  }
}
11 test/e2e/setup.sh Executable file
@@ -0,0 +1,11 @@
#!/bin/sh

current_dir=$(pwd)
setup_dir="$(cd "$(dirname "$0")" && pwd)"

# Set up for prompt-module/commitlint
cd $setup_dir && cd prompt-module/data/commitlint_9 && npm ci
cd $setup_dir && cd prompt-module/data/commitlint_18 && npm ci
cd $setup_dir && cd prompt-module/data/commitlint_19 && npm ci

cd $current_dir
33 test/e2e/utils.ts Normal file
@@ -0,0 +1,33 @@
import path from 'path'
import { mkdtemp, rm } from 'fs'
import { promisify } from 'util';
import { tmpdir } from 'os';
import { exec } from 'child_process';
const fsMakeTempDir = promisify(mkdtemp);
const fsExec = promisify(exec);
const fsRemove = promisify(rm);

/**
 * Prepare the environment for the test
 * Create a temporary git repository in the temp directory
 */
export const prepareEnvironment = async (): Promise<{
  gitDir: string;
  cleanup: () => Promise<void>;
}> => {
  const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
  // Create a remote git repository in the temp directory. This is necessary to execute the `git push` command
  await fsExec('git init --bare remote.git', { cwd: tempDir });
  await fsExec('git clone remote.git test', { cwd: tempDir });
  const gitDir = path.resolve(tempDir, 'test');

  const cleanup = async () => {
    return fsRemove(tempDir, { recursive: true });
  }
  return {
    gitDir,
    cleanup,
  }
}

export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
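For orientation only, here is a minimal sketch (not part of the diff) of how a new e2e test could reuse prepareEnvironment together with the cli-testing-library helpers used above; the file name is hypothetical and the asserted text mirrors noChanges.test.ts.

// hypothetical test/e2e/example.test.ts - illustrative sketch, not in this change set
import { resolve } from 'path';
import { render } from 'cli-testing-library';
import 'cli-testing-library/extend-expect';
import { prepareEnvironment } from './utils';

it('example: runs the built CLI inside a throwaway git repo', async () => {
  const { gitDir, cleanup } = await prepareEnvironment(); // temp clone with a bare 'remote.git' origin
  const { findByText } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
  expect(await findByText('No changes detected')).toBeInTheConsole(); // same assertion as noChanges.test.ts
  await cleanup(); // removes the temp directory
});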
10 test/jest-setup.ts Normal file
@@ -0,0 +1,10 @@
import 'cli-testing-library/extend-expect'
import { configure } from 'cli-testing-library'
import { jest } from '@jest/globals';

global.jest = jest;

/**
 * Adjusted the wait time for waitFor/findByText to 2000ms, because the default 1000ms makes the test results flaky
 */
configure({ asyncUtilTimeout: 2000 })
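The Jest configuration that loads this setup file is not shown in this compare view; a minimal sketch of what such a config could look like follows. Only the setupFilesAfterEnv entry pointing at test/jest-setup.ts is implied by the file above; the preset and test environment are assumptions.

// jest.config.ts - illustrative sketch only; the repository's real config may differ
import type { Config } from 'jest';

const config: Config = {
  preset: 'ts-jest',                                     // assumption: ts-jest compiles the TypeScript tests
  setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],  // loads the setup file shown above
  testEnvironment: 'node'                                // assumption
};

export default config;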
295 test/unit/config.test.ts Normal file
@@ -0,0 +1,295 @@
import { existsSync, readFileSync, rmSync } from 'fs';
import {
  DEFAULT_CONFIG,
  getConfig,
  setConfig
} from '../../src/commands/config';
import { prepareFile } from './utils';
import { dirname } from 'path';

describe('config', () => {
  const originalEnv = { ...process.env };
  let globalConfigFile: { filePath: string; cleanup: () => Promise<void> };
  let envConfigFile: { filePath: string; cleanup: () => Promise<void> };

  function resetEnv(env: NodeJS.ProcessEnv) {
    Object.keys(process.env).forEach((key) => {
      if (!(key in env)) {
        delete process.env[key];
      } else {
        process.env[key] = env[key];
      }
    });
  }

  beforeEach(async () => {
    resetEnv(originalEnv);
    if (globalConfigFile) await globalConfigFile.cleanup();
    if (envConfigFile) await envConfigFile.cleanup();
  });

  afterEach(async () => {
    if (globalConfigFile) await globalConfigFile.cleanup();
    if (envConfigFile) await envConfigFile.cleanup();
  });

  afterAll(() => {
    resetEnv(originalEnv);
  });

  const generateConfig = async (
    fileName: string,
    content: Record<string, string>
  ) => {
    const fileContent = Object.entries(content)
      .map(([key, value]) => `${key}="${value}"`)
      .join('\n');
    return await prepareFile(fileName, fileContent);
  };

  describe('getConfig', () => {
    it('should prioritize local .env over global .opencommit config', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-3.5-turbo',
        OCO_LANGUAGE: 'en'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_OPENAI_API_KEY: 'local-key',
        OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
        OCO_LANGUAGE: 'fr'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
      expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
      expect(config.OCO_LANGUAGE).toEqual('fr');
      expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
    });

    it('should fallback to global config when local config is not set', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'de',
        OCO_DESCRIPTION: 'true'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
      expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
      expect(config.OCO_LANGUAGE).toEqual('de');
      expect(config.OCO_DESCRIPTION).toEqual(true);
    });

    it('should handle boolean and numeric values correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_TOKENS_MAX_INPUT: '4096',
        OCO_TOKENS_MAX_OUTPUT: '500',
        OCO_GITPUSH: 'true'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_TOKENS_MAX_INPUT: '8192',
        OCO_ONE_LINE_COMMIT: 'false'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
      expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
      expect(config.OCO_GITPUSH).toEqual(true);
      expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
    });

    it('should handle empty local config correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'es'
      });

      envConfigFile = await generateConfig('.env', {});

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
      expect(config.OCO_LANGUAGE).toEqual('es');
    });

    it('should override global config with null values in local .env', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'es'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_OPENAI_API_KEY: 'null'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual(null);
    });

    it('should handle empty global config', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      envConfigFile = await generateConfig('.env', {});

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual(undefined);
    });
  });

  describe('setConfig', () => {
    beforeEach(async () => {
      // we create and delete the file to have the parent directory, but not the file, to test the creation of the file
      globalConfigFile = await generateConfig('.opencommit', {});
      rmSync(globalConfigFile.filePath);
    });

    it('should create .opencommit file with DEFAULT CONFIG if it does not exist on first setConfig run', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key_1']],
        globalConfigFile.filePath
      );

      const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key_1');
      Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
        expect(fileContent).toContain(`${key}=${value}`);
      });
    });

    it('should set new config values', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      await setConfig(
        [
          ['OCO_OPENAI_API_KEY', 'new-key'],
          ['OCO_MODEL', 'gpt-4']
        ],
        globalConfigFile.filePath
      );

      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_OPENAI_API_KEY).toEqual('new-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
    });

    it('should update existing config values', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'initial-key'
      });
      await setConfig(
        [['OCO_OPENAI_API_KEY', 'updated-key']],
        globalConfigFile.filePath
      );

      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_OPENAI_API_KEY).toEqual('updated-key');
    });

    it('should handle boolean and numeric values correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      await setConfig(
        [
          ['OCO_TOKENS_MAX_INPUT', '8192'],
          ['OCO_DESCRIPTION', 'true'],
          ['OCO_ONE_LINE_COMMIT', 'false']
        ],
        globalConfigFile.filePath
      );

      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
      expect(config.OCO_DESCRIPTION).toEqual(true);
      expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
    });

    it('should throw an error for unsupported config keys', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});

      try {
        await setConfig(
          [['UNSUPPORTED_KEY', 'value']],
          globalConfigFile.filePath
        );
        throw new Error('NEVER_REACHED');
      } catch (error) {
        expect(error.message).toContain(
          'Unsupported config key: UNSUPPORTED_KEY'
        );
        expect(error.message).not.toContain('NEVER_REACHED');
      }
    });

    it('should persist changes to the config file', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key']],
        globalConfigFile.filePath
      );

      const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key');
    });

    it('should set multiple configs in a row and keep the changes', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key']],
        globalConfigFile.filePath
      );

      const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent1).toContain('OCO_OPENAI_API_KEY=persisted-key');

      await setConfig([['OCO_MODEL', 'gpt-4']], globalConfigFile.filePath);

      const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent2).toContain('OCO_MODEL=gpt-4');
    });
  });
});
98 test/unit/gemini.test.ts Normal file
@@ -0,0 +1,98 @@
import { Gemini } from '../../src/engine/gemini';

import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
import {
  ConfigType,
  getConfig,
  OCO_AI_PROVIDER_ENUM
} from '../../src/commands/config';
import { OpenAI } from 'openai';

describe('Gemini', () => {
  let gemini: Gemini;
  let mockConfig: ConfigType;
  let mockGoogleGenerativeAi: GoogleGenerativeAI;
  let mockGenerativeModel: GenerativeModel;
  let mockExit: jest.SpyInstance<never, [code?: number | undefined], any>;

  const noop: (...args: any[]) => any = (...args: any[]) => {};

  const mockGemini = () => {
    mockConfig = getConfig() as ConfigType;

    gemini = new Gemini({
      apiKey: mockConfig.OCO_GEMINI_API_KEY,
      model: mockConfig.OCO_MODEL
    });
  };

  const oldEnv = process.env;

  beforeEach(() => {
    jest.resetModules();
    process.env = { ...oldEnv };

    jest.mock('@google/generative-ai');
    jest.mock('../../src/commands/config');

    jest.mock('@clack/prompts', () => ({
      intro: jest.fn(),
      outro: jest.fn()
    }));

    mockExit = jest.spyOn(process, 'exit').mockImplementation();

    mockConfig = getConfig() as ConfigType;

    mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
    mockConfig.OCO_GEMINI_API_KEY = 'mock-api-key';
    mockConfig.OCO_MODEL = 'gemini-1.5-flash';

    mockGoogleGenerativeAi = new GoogleGenerativeAI(
      mockConfig.OCO_GEMINI_API_KEY
    );
    mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
      model: mockConfig.OCO_MODEL
    });
  });

  afterEach(() => {
    gemini = undefined as any;
  });

  afterAll(() => {
    mockExit.mockRestore();
    process.env = oldEnv;
  });

  it.skip('should exit process if OCO_GEMINI_API_KEY is not set and command is not config', () => {
    process.env.OCO_GEMINI_API_KEY = undefined;
    process.env.OCO_AI_PROVIDER = 'gemini';

    mockGemini();

    expect(mockExit).toHaveBeenCalledWith(1);
  });

  it('should generate commit message', async () => {
    const mockGenerateContent = jest
      .fn()
      .mockResolvedValue({ response: { text: () => 'generated content' } });
    mockGenerativeModel.generateContent = mockGenerateContent;

    mockGemini();

    const messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam> =
      [
        { role: 'system', content: 'system message' },
        { role: 'assistant', content: 'assistant message' }
      ];

    jest
      .spyOn(gemini, 'generateCommitMessage')
      .mockImplementation(async () => 'generated content');
    const result = await gemini.generateCommitMessage(messages);

    expect(result).toEqual('generated content');
  });
});
32 test/unit/utils.ts Normal file
@@ -0,0 +1,32 @@
import { existsSync, mkdtemp, rm, writeFile } from 'fs';
import { tmpdir } from 'os';
import path from 'path';
import { promisify } from 'util';
const fsMakeTempDir = promisify(mkdtemp);
const fsRemove = promisify(rm);
const fsWriteFile = promisify(writeFile);

/**
 * Prepare tmp file for the test
 */
export async function prepareFile(
  fileName: string,
  content: string
): Promise<{
  filePath: string;
  cleanup: () => Promise<void>;
}> {
  const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
  const filePath = path.resolve(tempDir, fileName);
  await fsWriteFile(filePath, content);
  const cleanup = async () => {
    if (existsSync(tempDir)) {
      await fsRemove(tempDir, { recursive: true });
    }
  };

  return {
    filePath,
    cleanup
  };
}
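For orientation, a minimal sketch (illustrative, not part of the diff) of how prepareFile is typically consumed, mirroring the pattern in test/unit/config.test.ts above; the test name and key/value pairs are examples only.

// illustrative usage of prepareFile, modeled on config.test.ts (not in this change set)
import { getConfig } from '../../src/commands/config';
import { prepareFile } from './utils';

it('example: local .env wins over the global .opencommit file', async () => {
  // write throwaway config files in temp directories
  const globalFile = await prepareFile('.opencommit', 'OCO_MODEL="gpt-3.5-turbo"');
  const envFile = await prepareFile('.env', 'OCO_MODEL="gpt-4"');

  // point getConfig at the temp files instead of the real global/local config
  const config = getConfig({
    globalPath: globalFile.filePath,
    envPath: envFile.filePath
  });
  expect(config.OCO_MODEL).toEqual('gpt-4'); // the local value takes precedence, as asserted above

  // remove the temp directories again
  await globalFile.cleanup();
  await envFile.cleanup();
});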
@@ -1,12 +1,12 @@
{
  "compilerOptions": {
    "target": "ESNext",
    "lib": ["ES5", "ES6"],
    "target": "ES2020",
    "lib": ["ES6", "ES2020"],

    "module": "CommonJS",

    "module": "ESNext",
    // "rootDir": "./src",
    "resolveJsonModule": true,
    "moduleResolution": "node",
    "moduleResolution": "Node",

    "allowJs": true,

@@ -21,6 +21,7 @@

    "skipLibCheck": true
  },
  "include": ["test/jest-setup.ts"],
  "exclude": ["node_modules"],
  "ts-node": {
    "esm": true,