Compare commits

..

137 Commits

Author SHA1 Message Date
di-sukharev
bf931b0c3c chore: update package and action descriptions to better reflect the purpose of the project and remove unnecessary details
feat: add keywords to package and action descriptions to improve searchability and discoverability of the project
feat: update deploy script in package.json to include version bump and publish to npm registry with latest tag
2023-05-21 16:15:13 +08:00
di-sukharev
f687709abe build 2023-05-21 15:57:29 +08:00
di-sukharev
a969d9819e build 2023-05-21 15:53:32 +08:00
di-sukharev
d0d4b2d11d build 2023-05-21 15:35:01 +08:00
di-sukharev
d03f16c6ee build 2023-05-21 15:10:18 +08:00
di-sukharev
925f004f28 build 2023-05-21 15:10:18 +08:00
di-sukharev
f7f878b6dd build 2023-05-21 15:10:18 +08:00
di-sukharev
2f923f94ea build 2023-05-21 15:10:18 +08:00
di-sukharev
60410e6404 build 2023-05-21 15:10:18 +08:00
di-sukharev
f42e1ff9a3 build 2023-05-21 15:10:18 +08:00
di-sukharev
0c92feb3ae build 2023-05-21 15:10:18 +08:00
di-sukharev
8ae293e947 build 2023-05-21 15:10:18 +08:00
di-sukharev
68746fa17d build 2023-05-21 15:10:18 +08:00
di-sukharev
1aeed7c9ca build 2023-05-21 15:10:18 +08:00
di-sukharev
867a0158ed build 2023-05-21 15:10:18 +08:00
di-sukharev
942c2d8612 build 2023-05-21 15:10:18 +08:00
di-sukharev
2932a1fc28 build 2023-05-21 15:10:18 +08:00
di-sukharev
fef923a099 build 2023-05-21 15:10:18 +08:00
di-sukharev
851c8baff1 build 2023-05-21 15:10:18 +08:00
di-sukharev
f5517ed59f build 2023-05-21 15:10:18 +08:00
di-sukharev
3c9aabd239 build 2023-05-21 15:10:18 +08:00
di-sukharev
f506c1287c build 2023-05-21 15:10:18 +08:00
di-sukharev
1012e3948a build 2023-05-21 15:10:18 +08:00
di-sukharev
f6e2d439a4 build 2023-05-21 15:10:18 +08:00
di-sukharev
bcdc976c49 build 2023-05-21 15:10:18 +08:00
di-sukharev
ed9729e327 build 2023-05-21 15:10:18 +08:00
di-sukharev
1bdcc8adae build 2023-05-21 15:10:18 +08:00
di-sukharev
efdd98318e build 2023-05-21 15:10:18 +08:00
di-sukharev
af2e25dce0 build 2023-05-21 15:10:18 +08:00
di-sukharev
a0961f899e build 2023-05-21 15:10:18 +08:00
di-sukharev
5065c75730 build 2023-05-21 15:10:18 +08:00
di-sukharev
447a938a48 build 2023-05-21 15:10:18 +08:00
di-sukharev
b4ecce224b build 2023-05-21 15:10:18 +08:00
di-sukharev
07f07e5e18 build 2023-05-21 15:10:18 +08:00
di-sukharev
cd89054f0f build 2023-05-21 15:10:18 +08:00
di-sukharev
430f08a705 build 2023-05-21 15:10:17 +08:00
di-sukharev
65c9c729de build 2023-05-21 15:10:17 +08:00
di-sukharev
6c3bcdb6f9 build 2023-05-21 15:10:17 +08:00
di-sukharev
730bebe8f5 build 2023-05-21 15:10:17 +08:00
di-sukharev
2265e92e84 build 2023-05-21 15:10:17 +08:00
di-sukharev
939deca46f build 2023-05-21 15:10:17 +08:00
di-sukharev
d715685f0d build 2023-05-21 15:10:17 +08:00
di-sukharev
f2cf084bbf build 2023-05-21 15:10:17 +08:00
di-sukharev
eb4da47642 build 2023-05-21 15:10:17 +08:00
di-sukharev
b475d16c82 build 2023-05-21 15:10:17 +08:00
di-sukharev
fee4371bc6 build 2023-05-21 15:10:17 +08:00
di-sukharev
69e398ea2f build 2023-05-21 15:10:17 +08:00
di-sukharev
de4b2bd74a build 2023-05-21 15:10:17 +08:00
di-sukharev
b8c1e8c5ba build 2023-05-21 15:10:17 +08:00
di-sukharev
b0363fa638 build 2023-05-21 15:10:17 +08:00
di-sukharev
223a587765 build 2023-05-21 15:10:17 +08:00
di-sukharev
22f37e0da5 build 2023-05-21 15:10:17 +08:00
di-sukharev
2c5a37525c build 2023-05-21 15:10:17 +08:00
di-sukharev
9c87d8f7b8 build 2023-05-21 15:10:17 +08:00
di-sukharev
8f50c021c4 build 2023-05-21 15:10:17 +08:00
di-sukharev
9fb6946a6b build 2023-05-21 15:10:17 +08:00
di-sukharev
03210f16d0 build 2023-05-21 15:10:17 +08:00
di-sukharev
cec6890525 build 2023-05-21 15:10:17 +08:00
di-sukharev
3d199eb6d3 build 2023-05-21 15:10:17 +08:00
di-sukharev
a17462e9a2 build 2023-05-21 15:10:17 +08:00
di-sukharev
cfa5462cbd build 2023-05-21 15:10:17 +08:00
di-sukharev
c30d34e1b5 build 2023-05-21 15:10:17 +08:00
di-sukharev
b2ef14c586 build 2023-05-21 15:10:17 +08:00
di-sukharev
88f367d662 build 2023-05-21 15:10:17 +08:00
di-sukharev
32df5d5fe1 build 2023-05-21 15:10:17 +08:00
di-sukharev
13afc81858 build 2023-05-21 15:10:17 +08:00
di-sukharev
bf430b23db build 2023-05-21 15:10:17 +08:00
di-sukharev
820365dd06 build 2023-05-21 15:10:17 +08:00
di-sukharev
dd39ef2473 build 2023-05-21 15:10:17 +08:00
di-sukharev
5b0e3bf061 build 2023-05-21 15:10:17 +08:00
di-sukharev
a7da40f151 build 2023-05-21 15:10:17 +08:00
di-sukharev
23037a3988 build 2023-05-21 15:10:17 +08:00
di-sukharev
66eb8e1008 build 2023-05-21 15:10:17 +08:00
di-sukharev
90361b65ee build 2023-05-21 15:10:17 +08:00
di-sukharev
b7938e3488 build 2023-05-21 15:10:17 +08:00
di-sukharev
13db5f4498 build 2023-05-21 15:10:17 +08:00
di-sukharev
885b653e2e build 2023-05-21 15:10:17 +08:00
di-sukharev
f7ca45540a build 2023-05-21 15:10:17 +08:00
di-sukharev
10c89ded6e build 2023-05-21 15:10:17 +08:00
di-sukharev
542a53f3db build 2023-05-21 15:10:17 +08:00
di-sukharev
3b32ef3608 build 2023-05-21 15:10:17 +08:00
di-sukharev
6d23333305 build 2023-05-21 15:10:17 +08:00
di-sukharev
bd65b100a5 build 2023-05-21 15:10:16 +08:00
di-sukharev
b1e099f4bc build 2023-05-21 15:10:16 +08:00
di-sukharev
cfe891fec4 build 2023-05-21 15:10:16 +08:00
di-sukharev
fd5bccbcb1 build 2023-05-21 15:10:16 +08:00
di-sukharev
cbd2138552 build 2023-05-21 15:10:16 +08:00
di-sukharev
432fe88e82 build 2023-05-21 15:10:16 +08:00
di-sukharev
0f0c976b08 build 2023-05-21 15:10:16 +08:00
di-sukharev
06574056d7 build 2023-05-21 15:10:16 +08:00
di-sukharev
39e6568d73 build 2023-05-21 15:10:16 +08:00
di-sukharev
1867c96f22 build 2023-05-21 15:10:16 +08:00
di-sukharev
7f83ff9943 build 2023-05-21 15:10:16 +08:00
di-sukharev
f6749f38e9 build 2023-05-21 15:10:16 +08:00
di-sukharev
bb2dc327a7 build 2023-05-21 15:10:16 +08:00
di-sukharev
c45d5aa12b build 2023-05-21 15:10:16 +08:00
di-sukharev
17e80e0b4f build 2023-05-21 15:10:16 +08:00
di-sukharev
8acaf4c860 build 2023-05-21 15:10:16 +08:00
di-sukharev
20a2f68389 build 2023-05-21 15:10:16 +08:00
di-sukharev
25f791cfd5 build 2023-05-21 15:10:16 +08:00
di-sukharev
c9eb947148 build 2023-05-21 15:10:16 +08:00
di-sukharev
d6219478fc build 2023-05-21 15:10:16 +08:00
di-sukharev
be06729ad8 build 2023-05-21 15:10:16 +08:00
di-sukharev
f2680f2bf3 build 2023-05-21 15:10:16 +08:00
di-sukharev
f7afa94c9e build 2023-05-21 15:10:16 +08:00
di-sukharev
291cb2b5b6 build 2023-05-21 15:10:16 +08:00
di-sukharev
4552cc49a8 build 2023-05-21 15:10:16 +08:00
di-sukharev
8d09fe0b7c build 2023-05-21 15:10:16 +08:00
di-sukharev
490d209c64 build 2023-05-21 15:10:16 +08:00
di-sukharev
52a71728d4 build 2023-05-21 15:10:16 +08:00
di-sukharev
ffdf45dc17 build 2023-05-21 15:10:16 +08:00
di-sukharev
211ad20c34 build 2023-05-21 15:10:16 +08:00
di-sukharev
17802dcbd6 build 2023-05-21 15:10:16 +08:00
di-sukharev
053e1da0f1 removed logs 2023-05-21 15:10:16 +08:00
di-sukharev
666760d412 fixed 2023-05-21 15:10:16 +08:00
di-sukharev
c0e183797f build 2023-05-21 15:10:16 +08:00
di-sukharev
3dfa1e4a33 build 2023-05-21 15:10:16 +08:00
di-sukharev
2f5ea33f0f build 2023-05-21 15:10:16 +08:00
di-sukharev
476136a391 build 2023-05-21 15:10:16 +08:00
di-sukharev
fe555c66ed fix(api.ts): handle unknown errors in catch block and log error message if available
refactor(github-action.ts): remove console.log statements and improve readability of code
2023-05-21 15:10:16 +08:00
di-sukharev
431e10cb54 build 2023-05-21 15:10:16 +08:00
di-sukharev
e356b5dcf3 added log 2023-05-21 15:10:16 +08:00
di-sukharev
9fea9e244c build 2023-05-21 15:10:16 +08:00
di-sukharev
b16271a62f build 2023-05-21 15:10:16 +08:00
di-sukharev
87c978a58a build 2023-05-21 15:10:16 +08:00
di-sukharev
d6caa0c73c refactor(github-action.ts): remove unused spinner import and usage
refactor(github-action.ts): remove unused retries parameter from run function
refactor(github-action.ts): remove unused core.info call
refactor(github-action.ts): remove unused intro call
refactor(github-action.ts): remove unused spinner.stop call and replace with outro call
2023-05-21 15:10:16 +08:00
di-sukharev
efe0172f2d chore(action.yml): update node version to 16 to match the latest LTS version 2023-05-21 15:10:16 +08:00
di-sukharev
ba9503142c build 2023-05-21 15:10:16 +08:00
di-sukharev
b87faf0096 chore(README.md): update OpenCommit configuration variables to match new naming convention
fix(api.ts): change config variable names to match new naming convention
fix(api.ts): increase default max_tokens to 500
fix(commit.ts): stop spinner after commit message is generated

fix(config.ts): change config keys to use OCO prefix to avoid conflicts with other libraries
feat(config.ts): add support for OCO_EXCLUDE environment variable to exclude files from being committed
fix(prepare-commit-msg-hook.ts): change OPENAI_API_KEY to OCO_OPENAI_API_KEY to match new config keys
fix(generateCommitMessageFromGitDiff.ts): change config keys to use OCO prefix to match new config keys
2023-05-21 15:10:16 +08:00
di-sukharev
2681db1635 fix(action.yml): update path to main file to reflect new build output directory 2023-05-21 15:10:16 +08:00
di-sukharev
462798d7d2 remove /out from .gitignore to ba able to run opencommit via github action from other repos 2023-05-21 15:10:16 +08:00
di-sukharev
1abe655e00 chore(README.md): add instructions to setup OpenCommit as a Github Action
chore(action.yml): update description of the Github Action
feat(github-action.ts): add support for pattern input to only improve messages that match the regexp, e.g. ^fix$
2023-05-21 15:10:16 +08:00
di-sukharev
177a219ccb feat(commit.ts): add try-catch block to handle errors and show proper error messages
fix(commit.ts): fix indentation and formatting
feat(commit.ts): add support for selecting remote to push to when there are multiple remotes
fix(prepare-commit-msg-hook.ts): remove unnecessary if-else block and improve formatting
2023-05-21 15:10:16 +08:00
di-sukharev
373c90c760 refactor(commit.ts, prepare-commit-msg-hook.ts, generateCommitMessageFromGitDiff.ts, github-action.ts): rename generateCommitMessageWithChatCompletion to generateCommitMessageByDiff to improve semantics and consistency with other function names
feat(github-action.ts): add support for retrying the action in case of failure to improve robustness
2023-05-21 15:10:15 +08:00
di-sukharev
7652116e77 feat(github-action.ts): add support for pull_request events and improve event handling
refactor(github-action.ts): extract types for ListCommitsResponse, CommitsData, and CommitsArray
fix(github-action.ts): update improveCommitMessagesWithRebase function to accept CommitsArray type
2023-05-21 15:10:15 +08:00
di-sukharev
707d90de1c feat(package.json): add @octokit/webhooks-schemas and @octokit/webhooks-types for better webhook handling
refactor(github-action.ts): replace child_process with execa for better command execution
refactor(github-action.ts): use octokit/webhooks-types for improved typing and handling of GitHub events
feat(github-action.ts): add support for improving commit messages on push and pull_request events
2023-05-21 15:10:15 +08:00
di-sukharev
7615b95261 feat(action.yml): add OpenCommit GitHub Action configuration
feat(package.json): add @actions/core and @actions/github dependencies
feat(src/github-action.ts): create GitHub Action to check and replace 'oc' in commit messages with AI-generated messages
2023-05-21 15:10:15 +08:00
58 changed files with 11334 additions and 69362 deletions

View File

@@ -1 +0,0 @@
.env

View File

@@ -21,8 +21,8 @@
"rules": { "rules": {
"prettier/prettier": "error", "prettier/prettier": "error",
"no-console": "error", "no-console": "error",
"import/order": "off",
"sort-imports": "off", "sort-imports": "off",
"import/order": "off",
"simple-import-sort/imports": "error", "simple-import-sort/imports": "error",
"simple-import-sort/exports": "error", "simple-import-sort/exports": "error",
"import/first": "error", "import/first": "error",

View File

@@ -9,7 +9,7 @@ Thanks for considering contributing to the project.
 3. Create a new branch for your changes.
 4. Make your changes and commit them with descriptive commit messages.
 5. Push your changes to your forked repository.
-6. Create a pull request from your branch to the `dev` branch. Not `master` branch, PR to `dev` branch, please.
+6. Create a pull request from your branch to the `dev` branch.
 ## Getting started

Binary file not shown.

Before: 4.7 KiB

28
.github/workflows/stale.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests
on:
schedule:
- cron: '27 21 * * *'
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 40
stale-issue-message: 'Stale issue message'
stale-pr-message: 'Stale pull request message'
stale-issue-label: 'no-issue-activity'
stale-pr-label: 'no-pr-activity'

View File

@@ -1,46 +0,0 @@
name: Testing
on: [pull_request]
jobs:
unit-test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [20.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
- name: Install dependencies
run: npm install
- name: Run Unit Tests
run: npm run test:unit
e2e-test:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [20.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
- name: Install git
run: |
sudo apt-get update
sudo apt-get install -y git
git --version
- name: Setup git
run: |
git config --global user.email "test@example.com"
git config --global user.name "Test User"
- name: Install dependencies
run: npm install
- name: Build
run: npm run build
- name: Run E2E Tests
run: npm run test:e2e

View File

@@ -1 +0,0 @@
out/github-action.cjs

View File

@@ -1 +0,0 @@
out

View File

@@ -1,3 +1,2 @@
 /build
 /dist
-/out

View File

@@ -1,6 +1,6 @@
 MIT License
-Copyright (c) Dima Sukharev, https://github.com/di-sukharev
+Copyright (c) Dima Sukharev
 Permission is hereby granted, free of charge, to any person obtaining a
 copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

516
README.md
View File

@@ -2,12 +2,12 @@
<div> <div>
<img src=".github/logo-grad.svg" alt="OpenCommit logo"/> <img src=".github/logo-grad.svg" alt="OpenCommit logo"/>
<h1 align="center">OpenCommit</h1> <h1 align="center">OpenCommit</h1>
<h4 align="center">Follow the bird <a href="https://twitter.com/_sukharev_"><img src="https://img.shields.io/twitter/follow/_sukharev_?style=flat&label=_sukharev_&logo=twitter&color=0bf&logoColor=fff" align="center"></a> <h4 align="center">Follow the bird <a href="https://twitter.com/io_Y_oi"><img src="https://img.shields.io/twitter/follow/io_Y_oi?style=flat&label=io_Y_oi&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
</h4>
</div> </div>
<h2>Auto-generate meaningful commits in a second</h2> <h2>GPT CLI to auto-generate impressive commits in 1 second</h2>
<p>Killing lame commits with AI 🤯🔫</p> <p>Killing lame commits with AI 🤯🔫</p>
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a> <a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
<h4 align="center">🪩 Winner of <a href="https://twitter.com/_sukharev_/status/1683448136973582336">GitHub 2023 hackathon</a> 🪩</h4>
</div> </div>
--- ---
@@ -16,334 +16,24 @@
<img src=".github/opencommit-example.png" alt="OpenCommit example"/> <img src=".github/opencommit-example.png" alt="OpenCommit example"/>
</div> </div>
All the commits in this repo are authored by OpenCommit — look at [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emojis and long commit descriptions are configurable. All the commits in this repo are done with OpenCommit — look into [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emoji and long commit description text is configurable.
## Setup OpenCommit as a CLI tool ## Setup OpenCommit as a Github Action
You can use OpenCommit by simply running it via the CLI like this `oco`. 2 seconds and your staged changes are committed with a meaningful message.
1. Install OpenCommit globally to use in any repository:
```sh
npm install -g opencommit
```
Alternatively run it via `npx opencommit` or `bunx opencommit`
MacOS may ask to run the command with `sudo` when installing a package globally.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
3. Set the key to OpenCommit config:
```sh
oco config set OCO_OPENAI_API_KEY=<your_api_key>
```
Your API key is stored locally in the `~/.opencommit` config file.
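For reference, a sketch of what `~/.opencommit` might look like after that command (illustrative values only; only the keys you have actually set will appear):
```sh
cat ~/.opencommit
# OCO_OPENAI_API_KEY=sk-...
# OCO_MODEL=gpt-3.5-turbo
```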
## Usage
You can call OpenCommit directly to generate a commit message for your staged changes:
```sh
git add <files...>
opencommit
```
You can also use the `oco` shortcut:
```sh
git add <files...>
oco
```
Link to the GitMoji specification: https://gitmoji.dev/
You can also run it with a local model through ollama:
- install and start ollama
- run `ollama run mistral` (do this only once, to pull model)
- run (in your project directory):
```sh
git add <files...>
OCO_AI_PROVIDER='ollama' opencommit
```
### Flags
There are multiple optional flags that can be used with the `oco` command:
#### Use Full GitMoji Specification
This flag can only be used if the `OCO_EMOJI` configuration item is set to `true`. It allows users to use all emojis in the GitMoji specification. By default, the full GitMoji specification is set to `false`, which only includes 10 emojis (🐛✨📝🚀✅♻️⬆️🔧🌐💡).
This is to limit the number of tokens sent in each request. However, if you would like to use the full GitMoji specification, you can use the `--fgm` flag.
```
oco --fgm
```
#### Skip Commit Confirmation
This flag allows users to automatically commit the changes without having to manually confirm the commit message. This is useful for users who want to streamline the commit process and avoid additional steps. To use this flag, you can run the following command:
```
oco --yes
```
## Configuration
### Local per repo configuration
Create a `.env` file and add OpenCommit config variables there like this:
```env
OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview'>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
```
### Global config for all repos
Local config still takes priority over global config, but you may set `OCO_MODEL` and `OCO_LOCALE` globally and set local configs for `OCO_EMOJI` and `OCO_DESCRIPTION` per repo, which is more convenient.
Simply set any of the variables above like this:
```sh
oco config set OCO_MODEL=gpt-4
```
Configure [GitMoji](https://gitmoji.dev/) to preface a message.
```sh
oco config set OCO_EMOJI=true
```
To remove preface emojis:
```sh
oco config set OCO_EMOJI=false
```
### Switch to GPT-4 or other models
By default, OpenCommit uses the `gpt-3.5-turbo` model.
You may switch to GPT-4, which performs better but costs ~15x more 🤠
```sh
oco config set OCO_MODEL=gpt-4
```
or, as a cheaper option:
```sh
oco config set OCO_MODEL=gpt-3.5-turbo
```
or GPT-4 Turbo (Preview), which is more capable, has knowledge of world events up to April 2023, offers a 128k context window, and is 2-3x cheaper than GPT-4:
```sh
oco config set OCO_MODEL=gpt-4-0125-preview
```
Make sure that you spell it `gpt-4` (lowercase) and that you have API access to the 4th model. Even if you have ChatGPT+, that doesn't necessarily mean that you have API access to GPT-4.
### Switch to Azure OpenAI
By default OpenCommit uses [OpenAI](https://openai.com).
You could switch to [Azure OpenAI Service](https://learn.microsoft.com/azure/cognitive-services/openai/)🚀
```sh
opencommit config set OCO_AI_PROVIDER=azure
```
You still need to set 'OPENAI_API_KEY', and also set 'OPENAI_BASE_PATH' to your Azure endpoint and the deployment name as the 'model'.
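A minimal sketch of that setup, assuming the `OCO_`-prefixed config keys used elsewhere in this README (the config source in this diff also defines `OCO_AZURE_API_KEY` and `OCO_AZURE_ENDPOINT`, so the exact Azure key names may differ; the resource and deployment names below are placeholders):
```sh
oco config set OCO_AI_PROVIDER=azure
oco config set OCO_OPENAI_API_KEY=<your_azure_api_key>
oco config set OCO_OPENAI_BASE_PATH=https://<your-resource>.openai.azure.com
oco config set OCO_MODEL=<your_deployment_name>
```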
### Locale configuration
To globally specify the language used to generate commit messages:
```sh
# de, German ,Deutsch
oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch
# fr, French, française
oco config set OCO_LANGUAGE=fr
oco config set OCO_LANGUAGE=French
oco config set OCO_LANGUAGE=française
```
The default language setting is **English**
All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder
### Push to git
Pushing to git is on by default but if you would like to turn it off just use:
```sh
oco config set OCO_GITPUSH=false
```
### Switch to `@commitlint`
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages that respect your local `@commitlint` configuration.
You can set this option by running the following command:
```sh
oco config set OCO_PROMPT_MODULE=<module>
```
Replace `<module>` with either `conventional-commit` or `@commitlint`.
#### Example:
To switch to using the `@commitlint` prompt module, run:
```sh
oco config set OCO_PROMPT_MODULE=@commitlint
```
To switch back to the default conventional-commit message generator, run:
```sh
oco config set OCO_PROMPT_MODULE=conventional-commit
```
#### Integrating with `@commitlint`
The integration between `@commitlint` and OpenCommit is done automatically the first time OpenCommit is run with `OCO_PROMPT_MODULE` set to `@commitlint`. However, if you need to force set or reset the configuration for `@commitlint`, you can run the following command:
```sh
oco commitlint force
```
To view the generated configuration for `@commitlint`, you can use this command:
```sh
oco commitlint get
```
This allows you to ensure that the configuration is set up as desired.
Additionally, the integration creates a file named `.opencommit-commitlint` in your project directory, which contains the prompts used for the local `@commitlint` configuration. You can modify this file to fine-tune the example commit message generated by OpenAI, giving you the flexibility to make adjustments based on your preferences or project guidelines. If the local `@commitlint` configuration changes, this file will be updated the next time OpenCommit is run.
This offers you greater control over the generated commit messages, allowing for customization that aligns with your project's conventions.
## Git flags
The `opencommit` or `oco` commands can be used in place of the `git commit -m "${generatedMessage}"` command. This means that any regular flags that are used with the `git commit` command will also be applied when using `opencommit` or `oco`.
```sh
oco --no-verify
```
is translated to:
```sh
git commit -m "${generatedMessage}" --no-verify
```
To include a message in the generated message, you can utilize the template function, for instance:
```sh
oco '#205: $msg'
```
> opencommit examines placeholders in the parameters, allowing you to append additional information before and after the placeholders, such as the relevant Issue or Pull Request. Similarly, you have the option to customize the `OCO_MESSAGE_TEMPLATE_PLACEHOLDER` configuration item, for example, simplifying it to `$m`.
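A sketch of that customization, assuming the documented `oco config set` pattern (the issue number and the simplified `$m` placeholder are just examples):
```sh
oco config set OCO_MESSAGE_TEMPLATE_PLACEHOLDER='$m'
oco 'fixes #205: $m'
```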
### Message Template Placeholder Config
#### Overview
The `OCO_MESSAGE_TEMPLATE_PLACEHOLDER` feature in the `opencommit` tool allows users to embed a custom message within the generated commit message using a template function. This configuration is designed to enhance the flexibility and customizability of commit messages, making it easier for users to include relevant information directly within their commits.
#### Implementation Details
In our codebase, the implementation of this feature can be found in the following segment:
```javascript
commitMessage = messageTemplate.replace(
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
commitMessage
);
```
This line is responsible for replacing the placeholder in the `messageTemplate` with the actual `commitMessage`.
#### Usage
For instance, using the command `oco '$msg #205'`, users can leverage this feature. The provided code represents the backend mechanics of such commands, ensuring that the placeholder is replaced with the appropriate commit message.
#### Committing with the Message
Once users have generated their desired commit message, they can proceed to commit using the generated message. By understanding the feature's full potential and its implementation details, users can confidently use the generated messages for their commits.
### Ignore files
You can remove files from being sent to OpenAI by creating a `.opencommitignore` file. For example:
```ignorelang
path/to/large-asset.zip
**/*.jpg
```
This helps prevent opencommit from uploading artifacts and large files.
By default, opencommit ignores files matching: `*-lock.*` and `*.lock`
## Git hook (KILLER FEATURE)
You can set OpenCommit as Git [`prepare-commit-msg`](https://git-scm.com/docs/githooks#_prepare_commit_msg) hook. Hook integrates with your IDE Source Control and allows you to edit the message before committing.
To set the hook:
```sh
oco hook set
```
To unset the hook:
```sh
oco hook unset
```
To use the hook:
```sh
git add <files...>
git commit
```
Or follow the process of your IDE Source Control feature, when it calls `git commit` command — OpenCommit will integrate into the flow.
## Setup OpenCommit as a GitHub Action (BETA) 🔥
OpenCommit is now available as a GitHub Action which automatically improves all new commits messages when you push to remote! OpenCommit is now available as a GitHub Action which automatically improves all new commits messages when you push to remote!
This is great if you want to make sure all of the commits in all of your repository branches are meaningful and not lame like `fix1` or `done2`. This is great if you want to make sure all of the commits in all of repository branches are meaningful and not lame like `fix1` or `done2`.
Create a file `.github/workflows/opencommit.yml` with the contents below: ### Automatic 1 click setup
You can simply [setup the action automatically via the GitHub Marketplace](TODO).
### Manual 3 clicks setup
Create a file `.github/workflows/opencommit.yml` with contents below:
```yml ```yml
name: 'OpenCommit Action' name: 'OpenCommit'
on: on:
push: push:
@@ -353,7 +43,6 @@ on:
jobs: jobs:
opencommit: opencommit:
timeout-minutes: 10
name: OpenCommit name: OpenCommit
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions: write-all permissions: write-all
@@ -365,7 +54,7 @@ jobs:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: di-sukharev/opencommit@github-action-v1.0.4 - uses: di-sukharev/opencommit@github-action
with: with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -376,28 +65,177 @@ jobs:
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }} OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
# customization # customization
OCO_TOKENS_MAX_INPUT: 4096 OCO_OPENAI_MAX_TOKENS: 500
OCO_TOKENS_MAX_OUTPUT: 500
OCO_OPENAI_BASE_PATH: '' OCO_OPENAI_BASE_PATH: ''
OCO_DESCRIPTION: false OCO_DESCRIPTION: false
OCO_EMOJI: false OCO_EMOJI: false
OCO_MODEL: gpt-3.5-turbo OCO_MODEL: gpt-3.5-turbo
OCO_LANGUAGE: en OCO_LANGUAGE: en
OCO_PROMPT_MODULE: conventional-commit
``` ```
That is it. Now when you push to any branch in your repo — all NEW commits are being improved by your never-tired AI. That is it. Now when you push to any branch in your repo — all NEW commits are being improved by never-tired-AI.
Make sure you exclude public collaboration branches (`main`, `dev`, `etc`) in `branches-ignore`, so OpenCommit does not rebase commits there while improving the messages. Make sure you exclude public collaboration branches (`main`, `dev`, `etc`) in `branches-ignore`, so OpenCommit does not rebase commits there when improving the messages.
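To exclude those branches, a sketch of the relevant trigger section in `.github/workflows/opencommit.yml` (branch names are illustrative):
```yml
on:
  push:
    branches-ignore:
      - main
      - dev
```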
Interactive rebase (`rebase -i`) changes commits' SHA, so the commit history in remote becomes different from your local branch history. This is okay if you work on the branch alone, but may be inconvenient for other collaborators. Interactive rebase (`rebase -i`) changes commit SHA, so commit history in remote becomes different with your local branch history. It's ok when you work on the branch alone, but may be inconvenient for other collaborators.
## Setup OpenCommit as a CLI
You can use OpenCommit by simply running it via CLI like this `oc`. 2 seconds and your staged changes are committed with a meaningful message.
1. Install OpenCommit globally to use in any repository:
```sh
npm install -g opencommit
```
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure you add payment details, so API works.
3. Set the key to OpenCommit config:
```sh
opencommit config set OCO_OPENAI_API_KEY=<your_api_key>
```
Your api key is stored locally in `~/.opencommit` config file.
## Usage
You can call OpenCommit directly to generate a commit message for your staged changes:
```sh
git add <files...>
opencommit
```
You can also use the `oc` shortcut:
```sh
git add <files...>
oc
```
## Configuration
### Local per repo configuration
Create an `.env` file and add OpenCommit config variables there like this:
```env
OCO_OPENAI_API_KEY=<your openAI API token>
OCO_OPENAI_MAX_TOKENS=<max response tokens from openAI API>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to openAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description>
OCO_EMOJI=<add GitMoji>
OCO_MODEL=<either gpt-3.5-turbo or gpt-4>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
```
### Global config for all repos
Local config still takes priority over global config, but you may set `OCO_MODEL` and `OCO_LOCALE` globally and set local configs for `OCO_EMOJI` and `OCO_DESCRIPTION` per repo, which is more convenient.
Simply set any of the variables above like this:
```sh
oc config set OCO_OPENAI_API_KEY=gpt-4
```
Configure [GitMoji](https://gitmoji.dev/) to preface a message.
```sh
oc config set OCO_EMOJI=true
```
To remove preface emoji:
```sh
oc config set OCO_EMOJI=false
```
### Switch to GPT-4
By default OpenCommit uses GPT-3.5-turbo (ChatGPT).
You may switch to GPT-4, which performs better but costs ~15x more 🤠
```sh
oc config set OCO_MODEL=gpt-4
```
Make sure you spell it `gpt-4` (lowercase) and that you have API access to the 4th model. Even if you have ChatGPT+, it doesn't necessarily mean that you have API access to GPT-4.
## Locale configuration
To globally specify the language used to generate commit messages:
```sh
# de, German ,Deutsch
oc config set OCO_LANGUAGE=de
oc config set OCO_LANGUAGE=German
oc config set OCO_LANGUAGE=Deutsch
# fr, French, française
oc config set OCO_LANGUAGE=fr
oc config set OCO_LANGUAGE=French
oc config set OCO_LANGUAGE=française
```
The default language set is **English**
All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder
### Git flags
The `opencommit` or `oc` commands can be used in place of the `git commit -m "${generatedMessage}"` command. This means that any regular flags that are used with the `git commit` command will also be applied when using `opencommit` or `oc`.
```sh
oc --no-verify
```
is translated to:
```sh
git commit -m "${generatedMessage}" --no-verify
```
### Ignore files
You can ignore files from submission to OpenAI by creating a `.opencommitignore` file. For example:
```ignorelang
path/to/large-asset.zip
**/*.jpg
```
This is useful for preventing opencommit from uploading artifacts and large files.
By default, opencommit ignores files matching: `*-lock.*` and `*.lock`
## Git hook (KILLER FEATURE)
You can set OpenCommit as a Git [`prepare-commit-msg`](https://git-scm.com/docs/githooks#_prepare_commit_msg) hook. The hook integrates with your IDE Source Control and allows you to edit the message before committing.
To set the hook:
```sh
oc hook set
```
To unset the hook:
```sh
oc hook unset
```
To use the hook:
```sh
git add <files...>
git commit
```
Or follow the process of your IDE Source Control feature, when it calls `git commit` command — OpenCommit will integrate into the flow.
 ## Payments
-You pay for your requests to OpenAI API on your own.
-OpenCommit stores your key locally.
-OpenCommit by default uses 3.5-turbo model, it should not exceed $0.10 per casual working day.
-You may switch to gpt-4, it's better, but more expensive.
+You pay for your own requests to OpenAI API. OpenCommit uses ChatGPT (3.5-turbo) official model, that is ~15x times cheaper than GPT-4.

View File

@@ -1,10 +1,7 @@
-name: 'OpenCommit — improve commits with AI 🧙'
+name: 'OpenCommit'
-description: 'Replaces lame commit messages with meaningful AI-generated messages when you push to remote'
+description: 'Replaces lame commit messages with meaningful AI-generated messages when you push to remote 🤯🔫'
 author: 'https://github.com/di-sukharev'
 repo: 'https://github.com/di-sukharev/opencommit/tree/github-action'
-branding:
-  icon: 'git-commit'
-  color: 'green'
 keywords:
   [
     'git',

View File

@@ -1,28 +0,0 @@
/**
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
import type { Config } from 'jest';
const config: Config = {
testTimeout: 100_000,
coverageProvider: 'v8',
moduleDirectories: ['node_modules', 'src'],
preset: 'ts-jest/presets/js-with-ts-esm',
setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
testEnvironment: 'node',
testRegex: ['.*\\.test\\.ts$'],
transformIgnorePatterns: ['node_modules/(?!cli-testing-library)'],
transform: {
'^.+\\.(ts|tsx)$': [
'ts-jest',
{
diagnostics: false,
useESM: true
}
]
}
};
export default config;

26514
out/cli.cjs

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

6789
package-lock.json generated

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{ {
"name": "opencommit", "name": "opencommit",
"version": "3.0.16", "version": "2.0.15",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫", "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [ "keywords": [
"git", "git",
@@ -12,12 +12,12 @@
"aicommit", "aicommit",
"aicommits", "aicommits",
"gptcommit", "gptcommit",
"commit", "commit"
"ollama"
], ],
"main": "cli.js", "main": "cli.js",
"bin": { "bin": {
"opencommit": "./out/cli.cjs", "opencommit": "./out/cli.cjs",
"oc": "./out/cli.cjs",
"oco": "./out/cli.cjs" "oco": "./out/cli.cjs"
}, },
"repository": { "repository": {
@@ -27,8 +27,7 @@
"author": "https://github.com/di-sukharev", "author": "https://github.com/di-sukharev",
"license": "MIT", "license": "MIT",
"files": [ "files": [
"out/cli.cjs", "out/**/*"
"out/tiktoken_bg.wasm"
], ],
"release": { "release": {
"branches": [ "branches": [
@@ -41,35 +40,22 @@
"scripts": { "scripts": {
"watch": "npm run -S build -- --sourcemap --watch", "watch": "npm run -S build -- --sourcemap --watch",
"start": "node ./out/cli.cjs", "start": "node ./out/cli.cjs",
"ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
"dev": "ts-node ./src/cli.ts", "dev": "ts-node ./src/cli.ts",
"build": "rimraf out && node esbuild.config.js", "build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push", "deploy": "npm run build && npm version patch && npm publish --tag latest",
"deploy": "npm version patch && npm run build:push && git push --tags && npm publish --tag latest",
"lint": "eslint src --ext ts && tsc --noEmit", "lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src", "format": "prettier --write src"
"test:all": "npm run test:unit:docker && npm run test:e2e:docker",
"test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest test/unit",
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
"test:e2e": "jest test/e2e",
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e"
}, },
"devDependencies": { "devDependencies": {
"@commitlint/types": "^17.4.4",
"@types/ini": "^1.3.31", "@types/ini": "^1.3.31",
"@types/inquirer": "^9.0.3", "@types/inquirer": "^9.0.3",
"@types/jest": "^29.5.12",
"@types/node": "^16.18.14", "@types/node": "^16.18.14",
"@typescript-eslint/eslint-plugin": "^5.45.0", "@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0", "@typescript-eslint/parser": "^5.45.0",
"cli-testing-library": "^2.0.2",
"dotenv": "^16.0.3", "dotenv": "^16.0.3",
"esbuild": "^0.15.18", "esbuild": "^0.15.18",
"eslint": "^8.28.0", "eslint": "^8.28.0",
"jest": "^29.7.0",
"prettier": "^2.8.4", "prettier": "^2.8.4",
"ts-jest": "^29.1.2",
"ts-node": "^10.9.1", "ts-node": "^10.9.1",
"typescript": "^4.9.3" "typescript": "^4.9.3"
}, },
@@ -77,17 +63,13 @@
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/github": "^5.1.1", "@actions/github": "^5.1.1",
"@azure/openai": "^1.0.0-beta.12",
"@anthropic-ai/sdk": "^0.19.2",
"@clack/prompts": "^0.6.1", "@clack/prompts": "^0.6.1",
"@dqbd/tiktoken": "^1.0.2", "@dqbd/tiktoken": "^1.0.2",
"@octokit/webhooks-schemas": "^6.11.0", "@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0", "@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4", "axios": "^1.3.4",
"chalk": "^5.2.0", "chalk": "^5.2.0",
"cleye": "^1.3.2", "cleye": "^1.3.2",
"crypto": "^1.0.1",
"execa": "^7.0.0", "execa": "^7.0.0",
"ignore": "^5.2.4", "ignore": "^5.2.4",
"ini": "^3.0.1", "ini": "^3.0.1",

View File

@@ -1,5 +1,4 @@
 export enum COMMANDS {
-  config = 'config',
   hook = 'hook',
-  commitlint = 'commitlint'
+  config = 'config'
 }

View File

@@ -1,49 +1,27 @@
import { intro, outro } from '@clack/prompts';
import axios from 'axios'; import axios from 'axios';
import chalk from 'chalk'; import chalk from 'chalk';
import { execa } from 'execa';
import { import {
ChatCompletionRequestMessage, ChatCompletionRequestMessage,
Configuration as OpenAiApiConfiguration, Configuration as OpenAiApiConfiguration,
OpenAIApi OpenAIApi
} from 'openai'; } from 'openai';
import { intro, outro } from '@clack/prompts'; import { CONFIG_MODES, getConfig } from './commands/config';
import {
CONFIG_MODES,
DEFAULT_TOKEN_LIMITS,
getConfig
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
import { MODEL_LIST } from '../commands/config';
const config = getConfig(); const config = getConfig();
const MAX_TOKENS_OUTPUT = let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
config?.OCO_TOKENS_MAX_OUTPUT ||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const MAX_TOKENS_INPUT =
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
let basePath = config?.OCO_OPENAI_BASE_PATH; let basePath = config?.OCO_OPENAI_BASE_PATH;
let apiKey = config?.OCO_OPENAI_API_KEY; let apiKey = config?.OCO_OPENAI_API_KEY;
const [command, mode] = process.argv.slice(2); const [command, mode] = process.argv.slice(2);
const provider = config?.OCO_AI_PROVIDER; if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
if (
provider === 'openai' &&
!apiKey &&
command !== 'config' &&
mode !== CONFIG_MODES.set
) {
intro('opencommit'); intro('opencommit');
outro( outro(
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token> . If you are using GPT, make sure you add payment details, so API works.`' 'OCO_OPENAI_API_KEY is not set, please run `oc config set OCO_OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
); );
outro( outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup' 'For help look into README https://github.com/di-sukharev/opencommit#setup'
@@ -53,20 +31,8 @@ if (
} }
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo'; const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
if (provider === 'openai' &&
!MODEL_LIST.openai.includes(MODEL) &&
command !== 'config' &&
mode !== CONFIG_MODES.set) {
outro(
`${chalk.red('✖')} Unsupported model ${MODEL} for OpenAI. Supported models are: ${MODEL_LIST.openai.join(
', '
)}`
);
process.exit(1); class OpenAi {
}
class OpenAi implements AiEngine {
private openAiApiConfiguration = new OpenAiApiConfiguration({ private openAiApiConfiguration = new OpenAiApiConfiguration({
apiKey: apiKey apiKey: apiKey
}); });
@@ -87,17 +53,9 @@ class OpenAi implements AiEngine {
messages, messages,
temperature: 0, temperature: 0,
top_p: 0.1, top_p: 0.1,
max_tokens: MAX_TOKENS_OUTPUT max_tokens: maxTokens || 500
}; };
try { try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content) + 4)
.reduce((a, b) => a + b, 0);
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}
const { data } = await this.openAI.createChatCompletion(params); const { data } = await this.openAI.createChatCompletion(params);
const message = data.choices[0].message; const message = data.choices[0].message;
@@ -126,4 +84,18 @@ class OpenAi implements AiEngine {
}; };
} }
export const getOpenCommitLatestVersion = async (): Promise<
string | undefined
> => {
try {
const { data } = await axios.get(
'https://unpkg.com/opencommit/package.json'
);
return data.version;
} catch (_) {
outro('Error while getting the latest version of opencommit');
return undefined;
}
};
export const api = new OpenAi(); export const api = new OpenAi();

View File

@@ -1,14 +1,13 @@
#!/usr/bin/env node #!/usr/bin/env node
import { cli } from 'cleye'; import { cli } from 'cleye';
import packageJSON from '../package.json' assert { type: 'json' };
import packageJSON from '../package.json';
import { commit } from './commands/commit';
import { commitlintConfigCommand } from './commands/commitlint';
import { configCommand } from './commands/config'; import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js'; import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook'; import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion'; import { commit } from './commands/commit';
// import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
const extraArgs = process.argv.slice(2); const extraArgs = process.argv.slice(2);
@@ -16,26 +15,18 @@ cli(
{ {
version: packageJSON.version, version: packageJSON.version,
name: 'opencommit', name: 'opencommit',
commands: [configCommand, hookCommand, commitlintConfigCommand], commands: [configCommand, hookCommand],
flags: { flags: {},
fgm: Boolean,
yes: {
type: Boolean,
alias: 'y',
description: 'Skip commit confirmation prompt',
default: false
}
},
ignoreArgv: (type) => type === 'unknown-flag' || type === 'argument', ignoreArgv: (type) => type === 'unknown-flag' || type === 'argument',
help: { description: packageJSON.description } help: { description: packageJSON.description }
}, },
async ({ flags }) => { async () => {
await checkIsLatestVersion(); // await checkIsLatestVersion();
if (await isHookCalled()) { if (await isHookCalled()) {
prepareCommitMessageHook(); prepareCommitMessageHook();
} else { } else {
commit(extraArgs, false, flags.fgm, flags.yes); commit(extraArgs);
} }
}, },
extraArgs extraArgs

View File

@@ -1,9 +0,0 @@
# @commitlint Module for opencommit
1. Load commitlint configuration within tree.
2. Generate a commit with commitlint prompt:
- Will not run if hash is the same.
- Infer a prompt for each commitlint rule.
- Ask OpenAI to generate consistency with embedded commitlint rules.
- Store configuration close to commitlint configuration.
3. Replace conventional-commit prompt with commitlint prompt.

View File

@@ -1,17 +1,8 @@
import chalk from 'chalk';
import { execa } from 'execa'; import { execa } from 'execa';
import { import {
confirm, GenerateCommitMessageErrorEnum,
intro, generateCommitMessageByDiff
isCancel, } from '../generateCommitMessageFromGitDiff';
multiselect,
outro,
select,
spinner
} from '@clack/prompts';
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
import { import {
assertGitRepo, assertGitRepo,
getChangedFiles, getChangedFiles,
@@ -19,65 +10,44 @@ import {
getStagedFiles, getStagedFiles,
gitAdd gitAdd
} from '../utils/git'; } from '../utils/git';
import {
spinner,
confirm,
outro,
isCancel,
intro,
multiselect,
select
} from '@clack/prompts';
import chalk from 'chalk';
import { trytm } from '../utils/trytm'; import { trytm } from '../utils/trytm';
import { getConfig } from './config';
const config = getConfig();
const getGitRemotes = async () => { const getGitRemotes = async () => {
const { stdout } = await execa('git', ['remote']); const { stdout } = await execa('git', ['remote']);
return stdout.split('\n').filter((remote) => Boolean(remote.trim())); return stdout.split('\n').filter((remote) => Boolean(remote.trim()));
}; };
// Check for the presence of message templates
const checkMessageTemplate = (extraArgs: string[]): string | false => {
for (const key in extraArgs) {
if (extraArgs[key].includes(config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER))
return extraArgs[key];
}
return false;
};
const generateCommitMessageFromGitDiff = async ( const generateCommitMessageFromGitDiff = async (
diff: string, diff: string,
extraArgs: string[], extraArgs: string[]
fullGitMojiSpec: boolean,
skipCommitConfirmation: boolean
): Promise<void> => { ): Promise<void> => {
await assertGitRepo(); await assertGitRepo();
const commitSpinner = spinner(); const commitSpinner = spinner();
commitSpinner.start('Generating the commit message'); commitSpinner.start('Generating the commit message');
try { try {
let commitMessage = await generateCommitMessageByDiff( const commitMessage = await generateCommitMessageByDiff(diff);
diff,
fullGitMojiSpec
);
const messageTemplate = checkMessageTemplate(extraArgs);
if (
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER &&
typeof messageTemplate === 'string'
) {
const messageTemplateIndex = extraArgs.indexOf(messageTemplate);
extraArgs.splice(messageTemplateIndex, 1);
commitMessage = messageTemplate.replace(
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
commitMessage
);
}
commitSpinner.stop('📝 Commit message generated'); commitSpinner.stop('📝 Commit message generated');
outro( outro(
`Generated commit message: `Commit message:
${chalk.grey('——————————————————')} ${chalk.grey('——————————————————')}
${commitMessage} ${commitMessage}
${chalk.grey('——————————————————')}` ${chalk.grey('——————————————————')}`
); );
const isCommitConfirmedByUser = skipCommitConfirmation || await confirm({ const isCommitConfirmedByUser = await confirm({
message: 'Confirm the commit message?' message: 'Confirm the commit message?'
}); });
@@ -95,17 +65,13 @@ ${chalk.grey('——————————————————')}`
const remotes = await getGitRemotes(); const remotes = await getGitRemotes();
// user isn't pushing, return early
if (config?.OCO_GITPUSH === false)
return
if (!remotes.length) { if (!remotes.length) {
const { stdout } = await execa('git', ['push']); const { stdout } = await execa('git', ['push']);
if (stdout) outro(stdout); if (stdout) outro(stdout);
process.exit(0); process.exit(0);
} }
if (remotes.length === 1 && config?.OCO_GITPUSH !== true) { if (remotes.length === 1) {
const isPushConfirmedByUser = await confirm({ const isPushConfirmedByUser = await confirm({
message: 'Do you want to run `git push`?' message: 'Do you want to run `git push`?'
}); });
@@ -113,7 +79,7 @@ ${chalk.grey('——————————————————')}`
if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) { if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
const pushSpinner = spinner(); const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${remotes[0]}'`); pushSpinner.start(`Running \`git push ${remotes[0]}\``);
const { stdout } = await execa('git', [ const { stdout } = await execa('git', [
'push', 'push',
@@ -141,7 +107,7 @@ ${chalk.grey('——————————————————')}`
if (!isCancel(selectedRemote)) { if (!isCancel(selectedRemote)) {
const pushSpinner = spinner(); const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${selectedRemote}'`); pushSpinner.start(`Running \`git push ${selectedRemote}\``);
const { stdout } = await execa('git', ['push', selectedRemote]); const { stdout } = await execa('git', ['push', selectedRemote]);
@@ -155,18 +121,6 @@ ${chalk.grey('——————————————————')}`
} else outro(`${chalk.gray('✖')} process cancelled`); } else outro(`${chalk.gray('✖')} process cancelled`);
} }
} }
if (!isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
const regenerateMessage = await confirm({
message: 'Do you want to regenerate the message ?'
});
if (regenerateMessage && !isCancel(isCommitConfirmedByUser)) {
await generateCommitMessageFromGitDiff(
diff,
extraArgs,
fullGitMojiSpec
)
}
}
} catch (error) { } catch (error) {
commitSpinner.stop('📝 Commit message generated'); commitSpinner.stop('📝 Commit message generated');
@@ -178,16 +132,14 @@ ${chalk.grey('——————————————————')}`
export async function commit( export async function commit(
extraArgs: string[] = [], extraArgs: string[] = [],
isStageAllFlag: Boolean = false, isStageAllFlag: Boolean = false
fullGitMojiSpec: boolean = false,
skipCommitConfirmation: boolean = false
) { ) {
if (isStageAllFlag) { if (isStageAllFlag) {
const changedFiles = await getChangedFiles(); const changedFiles = await getChangedFiles();
if (changedFiles) await gitAdd({ files: changedFiles }); if (changedFiles) await gitAdd({ files: changedFiles });
else { else {
outro('No changes detected, write some code and run `oco` again'); outro('No changes detected, write some code and run `oc` again');
process.exit(1); process.exit(1);
} }
} }
@@ -220,7 +172,7 @@ export async function commit(
isStageAllAndCommitConfirmedByUser && isStageAllAndCommitConfirmedByUser &&
!isCancel(isStageAllAndCommitConfirmedByUser) !isCancel(isStageAllAndCommitConfirmedByUser)
) { ) {
await commit(extraArgs, true, fullGitMojiSpec); await commit(extraArgs, true);
process.exit(1); process.exit(1);
} }
@@ -238,7 +190,7 @@ export async function commit(
await gitAdd({ files }); await gitAdd({ files });
} }
await commit(extraArgs, false, fullGitMojiSpec); await commit(extraArgs, false);
process.exit(1); process.exit(1);
} }
@@ -251,9 +203,7 @@ export async function commit(
const [, generateCommitError] = await trytm( const [, generateCommitError] = await trytm(
generateCommitMessageFromGitDiff( generateCommitMessageFromGitDiff(
await getDiff({ files: stagedFiles }), await getDiff({ files: stagedFiles }),
extraArgs, extraArgs
fullGitMojiSpec,
skipCommitConfirmation
) )
); );

View File

@@ -1,46 +0,0 @@
import chalk from 'chalk';
import { command } from 'cleye';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum';
import { configureCommitlintIntegration } from '../modules/commitlint/config';
import { getCommitlintLLMConfig } from '../modules/commitlint/utils';
export enum CONFIG_MODES {
get = 'get',
force = 'force'
}
export const commitlintConfigCommand = command(
{
name: COMMANDS.commitlint,
parameters: ['<mode>']
},
async (argv) => {
intro('opencommit — configure @commitlint');
try {
const { mode } = argv._;
if (mode === CONFIG_MODES.get) {
const commitLintConfig = await getCommitlintLLMConfig();
outro(commitLintConfig.toString());
return;
}
if (mode === CONFIG_MODES.force) {
await configureCommitlintIntegration(true);
return;
}
throw new Error(
`Unsupported mode: ${mode}. Valid modes are: "force" and "get"`
);
} catch (error) {
outro(`${chalk.red('✖')} ${error}`);
process.exit(1);
}
}
);

View File

@@ -1,33 +1,25 @@
import chalk from 'chalk';
import { command } from 'cleye'; import { command } from 'cleye';
import * as dotenv from 'dotenv'; import { join as pathJoin } from 'path';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { parse as iniParse, stringify as iniStringify } from 'ini'; import { parse as iniParse, stringify as iniStringify } from 'ini';
import { existsSync, writeFileSync, readFileSync } from 'fs';
import { homedir } from 'os'; import { homedir } from 'os';
import { join as pathJoin, resolve as pathResolve } from 'path';
import { intro, outro } from '@clack/prompts'; import { intro, outro } from '@clack/prompts';
import chalk from 'chalk';
import { COMMANDS } from '../CommandsEnum'; import { COMMANDS } from '../CommandsEnum';
import { getI18nLocal } from '../i18n'; import { getI18nLocal } from '../i18n';
import * as dotenv from 'dotenv';
dotenv.config();
export enum CONFIG_KEYS { export enum CONFIG_KEYS {
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY', OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY', OCO_OPENAI_MAX_TOKENS = 'OCO_OPENAI_MAX_TOKENS',
OCO_AZURE_API_KEY = 'OCO_AZURE_API_KEY',
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH', OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
OCO_DESCRIPTION = 'OCO_DESCRIPTION', OCO_DESCRIPTION = 'OCO_DESCRIPTION',
OCO_EMOJI = 'OCO_EMOJI', OCO_EMOJI = 'OCO_EMOJI',
OCO_MODEL = 'OCO_MODEL', OCO_MODEL = 'OCO_MODEL',
OCO_LANGUAGE = 'OCO_LANGUAGE', OCO_LANGUAGE = 'OCO_LANGUAGE'
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
OCO_GITPUSH = 'OCO_GITPUSH',
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT'
} }
export enum CONFIG_MODES { export enum CONFIG_MODES {
@@ -35,37 +27,6 @@ export enum CONFIG_MODES {
set = 'set' set = 'set'
} }
export const MODEL_LIST = {
openai: ['gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-4',
'gpt-4-turbo',
'gpt-4-1106-preview',
'gpt-4-turbo-preview',
'gpt-4-0125-preview',
'gpt-4o'],
anthropic: ['claude-3-haiku-20240307',
'claude-3-sonnet-20240229',
'claude-3-opus-20240229']
}
const getDefaultModel = (provider: string | undefined): string => {
switch (provider) {
case 'ollama':
return '';
case 'anthropic':
return MODEL_LIST.anthropic[0];
default:
return MODEL_LIST.openai[0];
}
};
export enum DEFAULT_TOKEN_LIMITS {
DEFAULT_MAX_TOKENS_INPUT = 4096,
DEFAULT_MAX_TOKENS_OUTPUT = 500
}
const validateConfig = ( const validateConfig = (
key: string, key: string,
condition: any, condition: any,
@@ -81,37 +42,17 @@ const validateConfig = (
}; };
export const configValidators = { export const configValidators = {
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) { [CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any) {
//need api key unless running locally with ollama validateConfig(CONFIG_KEYS.OCO_OPENAI_API_KEY, value, 'Cannot be empty');
validateConfig( validateConfig(
'OpenAI API_KEY', CONFIG_KEYS.OCO_OPENAI_API_KEY,
value || config.OCO_ANTHROPIC_API_KEY || config.OCO_AI_PROVIDER.startsWith('ollama') || config.OCO_AZURE_API_KEY || config.OCO_AI_PROVIDER == 'test' , value.startsWith('sk-'),
'You need to provide an OpenAI/Anthropic/Azure API key' 'Must start with "sk-"'
); );
validateConfig( validateConfig(
CONFIG_KEYS.OCO_OPENAI_API_KEY, CONFIG_KEYS.OCO_OPENAI_API_KEY,
value.startsWith('sk-') || config.OCO_AI_PROVIDER != 'openai', value.length === 51,
'Must start with "sk-" for openai provider' 'Must be 51 characters long'
);
return value;
},
[CONFIG_KEYS.OCO_AZURE_API_KEY](value: any, config: any = {}) {
validateConfig(
'ANTHROPIC_API_KEY',
value || config.OCO_OPENAI_API_KEY || config.OCO_AZURE_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
'You need to provide an OpenAI/Anthropic/Azure API key'
);
return value;
},
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
validateConfig(
'ANTHROPIC_API_KEY',
value || config.OCO_OPENAI_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
'You need to provide an OpenAI/Anthropic/Azure API key'
); );
return value; return value;
@@ -127,38 +68,19 @@ export const configValidators = {
return value; return value;
}, },
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) { [CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS](value: any) {
// If the value is a string, convert it to a number. // If the value is a string, convert it to a number.
if (typeof value === 'string') { if (typeof value === 'string') {
value = parseInt(value); value = parseInt(value);
validateConfig( validateConfig(
CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
!isNaN(value), !isNaN(value),
'Must be a number' 'Must be a number'
); );
} }
validateConfig( validateConfig(
CONFIG_KEYS.OCO_TOKENS_MAX_INPUT, CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
value ? typeof value === 'number' : undefined, typeof value === 'number',
'Must be a number'
);
return value;
},
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT](value: any) {
// If the value is a string, convert it to a number.
if (typeof value === 'string') {
value = parseInt(value);
validateConfig(
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
!isNaN(value),
'Must be a number'
);
}
validateConfig(
CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT,
value ? typeof value === 'number' : undefined,
'Must be a number' 'Must be a number'
); );
@@ -193,123 +115,31 @@ export const configValidators = {
return value; return value;
}, },
[CONFIG_KEYS.OCO_MODEL](value: any, config: any = {}) { [CONFIG_KEYS.OCO_MODEL](value: any) {
validateConfig( validateConfig(
CONFIG_KEYS.OCO_MODEL, CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
[...MODEL_LIST.openai, ...MODEL_LIST.anthropic].includes(value) || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test'|| config.OCO_AI_PROVIDER == 'azure', value === 'gpt-3.5-turbo' || value === 'gpt-4',
`${value} is not supported yet, use 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview', 'gpt-4-0125-preview', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229' or 'claude-3-haiku-20240307'` `${value} is not supported yet, use 'gpt-4' or 'gpt-3.5-turbo' (default)`
);
validateConfig(
CONFIG_KEYS.OCO_MODEL,
typeof value === 'string' &&
value.match(/^[a-zA-Z0-9~\-]{1,63}[a-zA-Z0-9]$/) ||
config.OCO_AI_PROVIDER != 'azure',
`${value} is not model deployed name.`
); );
return value; return value;
}, }
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER](value: any) {
validateConfig(
CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
value.startsWith('$'),
`${value} must start with $, for example: '$msg'`
);
return value;
},
[CONFIG_KEYS.OCO_PROMPT_MODULE](value: any) {
validateConfig(
CONFIG_KEYS.OCO_PROMPT_MODULE,
['conventional-commit', '@commitlint'].includes(value),
`${value} is not supported yet, use '@commitlint' or 'conventional-commit' (default)`
);
return value;
},
[CONFIG_KEYS.OCO_GITPUSH](value: any) {
validateConfig(
CONFIG_KEYS.OCO_GITPUSH,
typeof value === 'boolean',
'Must be true or false'
);
return value;
},
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AI_PROVIDER,
[
'',
'openai',
'anthropic',
'azure',
'ollama',
'test'
].includes(value) || value.startsWith('ollama'),
`${value} is not supported yet, use 'ollama/{model}', 'azure', 'anthropic' or 'openai' (default)`
);
return value;
},
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_ONE_LINE_COMMIT,
typeof value === 'boolean',
'Must be true or false'
);
return value;
},
[CONFIG_KEYS.OCO_AZURE_ENDPOINT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AZURE_ENDPOINT,
value.includes('openai.azure.com'),
'Must be in format "https://<resource name>.openai.azure.com/"'
);
return value;
},
}; };
export type ConfigType = { export type ConfigType = {
[key in CONFIG_KEYS]?: any; [key in CONFIG_KEYS]?: any;
}; };
const defaultConfigPath = pathJoin(homedir(), '.opencommit'); const configPath = pathJoin(homedir(), '.opencommit');
const defaultEnvPath = pathResolve(process.cwd(), '.env');
export const getConfig = ({ export const getConfig = (): ConfigType | null => {
configPath = defaultConfigPath,
envPath = defaultEnvPath
}: {
configPath?: string
envPath?: string
} = {}): ConfigType | null => {
dotenv.config({ path: envPath });
const configFromEnv = { const configFromEnv = {
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY, OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY, OCO_OPENAI_MAX_TOKENS: Number(process.env.OCO_OPENAI_MAX_TOKENS),
OCO_AZURE_API_KEY: process.env.OCO_AZURE_API_KEY,
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
? Number(process.env.OCO_TOKENS_MAX_INPUT)
: undefined,
OCO_TOKENS_MAX_OUTPUT: process.env.OCO_TOKENS_MAX_OUTPUT
? Number(process.env.OCO_TOKENS_MAX_OUTPUT)
: undefined,
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH, OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false, OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,
OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false, OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false,
OCO_MODEL: process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER), OCO_MODEL: process.env.OCO_MODEL,
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en', OCO_LANGUAGE: process.env.OCO_LANGUAGE
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit',
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai',
OCO_GITPUSH: process.env.OCO_GITPUSH === 'false' ? false : true,
OCO_ONE_LINE_COMMIT:
process.env.OCO_ONE_LINE_COMMIT === 'true' ? true : false,
OCO_AZURE_ENDPOINT: process.env.OCO_AZURE_ENDPOINT || '',
}; };
const configExists = existsSync(configPath); const configExists = existsSync(configPath);
@@ -319,22 +149,17 @@ export const getConfig = ({
const config = iniParse(configFile); const config = iniParse(configFile);
for (const configKey of Object.keys(config)) { for (const configKey of Object.keys(config)) {
if (
['null', 'undefined'].includes(config[configKey])
) {
config[configKey] = undefined;
continue;
}
try { try {
const validator = configValidators[configKey as CONFIG_KEYS]; const validator = configValidators[configKey as CONFIG_KEYS];
const validValue = validator( const validValue = validator(
config[configKey] ?? configFromEnv[configKey as CONFIG_KEYS], config[configKey] ?? configFromEnv[configKey as CONFIG_KEYS]
config
); );
config[configKey] = validValue; config[configKey] = validValue;
} catch (error) { } catch (error) {
outro(`Unknown '${configKey}' config option or missing validator.`); outro(
`'${configKey}' name is invalid, it should be either 'OCO_${configKey.toUpperCase()}' or it doesn't exist.`
);
outro( outro(
`Manually fix the '.env' file or global '~/.opencommit' config file.` `Manually fix the '.env' file or global '~/.opencommit' config file.`
); );
@@ -345,7 +170,7 @@ export const getConfig = ({
return config; return config;
}; };
export const setConfig = (keyValues: [key: string, value: string][], configPath: string = defaultConfigPath) => { export const setConfig = (keyValues: [key: string, value: string][]) => {
const config = getConfig() || {}; const config = getConfig() || {};
for (const [configKey, configValue] of keyValues) { for (const [configKey, configValue] of keyValues) {


@@ -1,13 +1,11 @@
import chalk from 'chalk';
import { command } from 'cleye';
import { existsSync } from 'fs';
import fs from 'fs/promises'; import fs from 'fs/promises';
import path from 'path'; import path from 'path';
import { command } from 'cleye';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum.js';
import { assertGitRepo, getCoreHooksPath } from '../utils/git.js'; import { assertGitRepo, getCoreHooksPath } from '../utils/git.js';
import { existsSync } from 'fs';
import chalk from 'chalk';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum.js';
const HOOK_NAME = 'prepare-commit-msg'; const HOOK_NAME = 'prepare-commit-msg';
const DEFAULT_SYMLINK_URL = path.join('.git', 'hooks', HOOK_NAME); const DEFAULT_SYMLINK_URL = path.join('.git', 'hooks', HOOK_NAME);
@@ -94,7 +92,7 @@ export const hookCommand = command(
} }
throw new Error( throw new Error(
`Unsupported mode: ${mode}. Supported modes are: 'set' or 'unset', do: \`oco hook set\`` `Unsupported mode: ${mode}. Supported modes are: 'set' or 'unset'`
); );
} catch (error) { } catch (error) {
outro(`${chalk.red('✖')} ${error}`); outro(`${chalk.red('✖')} ${error}`);


@@ -1,11 +1,9 @@
import chalk from 'chalk';
import fs from 'fs/promises'; import fs from 'fs/promises';
import chalk from 'chalk';
import { intro, outro, spinner } from '@clack/prompts'; import { intro, outro, spinner } from '@clack/prompts';
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
import { getChangedFiles, getDiff, getStagedFiles, gitAdd } from '../utils/git'; import { getChangedFiles, getDiff, getStagedFiles, gitAdd } from '../utils/git';
import { getConfig } from './config'; import { getConfig } from './config';
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
const [messageFilePath, commitSource] = process.argv.slice(2); const [messageFilePath, commitSource] = process.argv.slice(2);
@@ -26,7 +24,7 @@ export const prepareCommitMessageHook = async (
if (changedFiles) await gitAdd({ files: changedFiles }); if (changedFiles) await gitAdd({ files: changedFiles });
else { else {
outro('No changes detected, write some code and run `oco` again'); outro('No changes detected, write some code and run `oc` again');
process.exit(1); process.exit(1);
} }
} }
@@ -39,9 +37,9 @@ export const prepareCommitMessageHook = async (
const config = getConfig(); const config = getConfig();
if (!config?.OCO_OPENAI_API_KEY && !config?.OCO_ANTHROPIC_API_KEY && !config?.OCO_AZURE_API_KEY) { if (!config?.OCO_OPENAI_API_KEY) {
throw new Error( throw new Error(
'No OPEN_AI_API or OCO_ANTHROPIC_API_KEY or OCO_AZURE_API_KEY exists. Set your key in ~/.opencommit' 'No OPEN_AI_API exists. Set your OPEN_AI_API=<key> in ~/.opencommit'
); );
} }


@@ -1,7 +0,0 @@
import { ChatCompletionRequestMessage } from 'openai';
export interface AiEngine {
generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined>;
}
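
For reference, a minimal sketch of a custom engine written against this interface; the EchoEngine name is illustrative and not part of the project:

import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';

// Hypothetical engine: returns the content of the last chat message instead of calling an API.
class EchoEngine implements AiEngine {
  async generateCommitMessage(
    messages: Array<ChatCompletionRequestMessage>
  ): Promise<string | undefined> {
    return messages[messages.length - 1]?.content;
  }
}

export const echoEngine = new EchoEngine();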


@@ -1,124 +0,0 @@
import axios from 'axios';
import chalk from 'chalk';
import Anthropic from '@anthropic-ai/sdk';
import {ChatCompletionRequestMessage} from 'openai'
import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources';
import { intro, outro } from '@clack/prompts';
import {
CONFIG_MODES,
DEFAULT_TOKEN_LIMITS,
getConfig
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
import { MODEL_LIST } from '../commands/config';
const config = getConfig();
const MAX_TOKENS_OUTPUT =
config?.OCO_TOKENS_MAX_OUTPUT ||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const MAX_TOKENS_INPUT =
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
let provider = config?.OCO_AI_PROVIDER;
let apiKey = config?.OCO_ANTHROPIC_API_KEY;
const [command, mode] = process.argv.slice(2);
if (
provider === 'anthropic' &&
!apiKey &&
command !== 'config' &&
mode !== CONFIG_MODES.set
) {
intro('opencommit');
outro(
'OCO_ANTHROPIC_API_KEY is not set, please run `oco config set OCO_ANTHROPIC_API_KEY=<your token> . If you are using Claude, make sure you add payment details, so API works.`'
);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
);
process.exit(1);
}
const MODEL = config?.OCO_MODEL;
if (provider === 'anthropic' &&
!MODEL_LIST.anthropic.includes(MODEL) &&
command !== 'config' &&
mode !== CONFIG_MODES.set) {
outro(
`${chalk.red('✖')} Unsupported model ${MODEL} for Anthropic. Supported models are: ${MODEL_LIST.anthropic.join(
', '
)}`
);
process.exit(1);
}
class AnthropicAi implements AiEngine {
private anthropicAiApiConfiguration = {
apiKey: apiKey
};
private anthropicAI!: Anthropic;
constructor() {
this.anthropicAI = new Anthropic(this.anthropicAiApiConfiguration);
}
public generateCommitMessage = async (
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> => {
const systemMessage = messages.find(msg => msg.role === 'system')?.content as string;
const restMessages = messages.filter((msg) => msg.role !== 'system') as MessageParam[];
const params: MessageCreateParamsNonStreaming = {
model: MODEL,
system: systemMessage,
messages: restMessages,
temperature: 0,
top_p: 0.1,
max_tokens: MAX_TOKENS_OUTPUT
};
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content as string) + 4)
.reduce((a, b) => a + b, 0);
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}
const data = await this.anthropicAI.messages.create(params);
const message = data?.content[0].text;
return message;
} catch (error) {
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const anthropicAiError = error.response.data.error;
if (anthropicAiError?.message) outro(anthropicAiError.message);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
);
}
throw err;
}
};
}
export const anthropicAi = new AnthropicAi();


@@ -1,109 +0,0 @@
import axios from 'axios';
import chalk from 'chalk';
import { execa } from 'execa';
import {
ChatCompletionRequestMessage,
} from 'openai';
import { OpenAIClient, AzureKeyCredential } from '@azure/openai';
import { intro, outro } from '@clack/prompts';
import {
CONFIG_MODES,
DEFAULT_TOKEN_LIMITS,
getConfig
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
const config = getConfig();
const MAX_TOKENS_OUTPUT =
config?.OCO_TOKENS_MAX_OUTPUT ||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const MAX_TOKENS_INPUT =
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
let basePath = config?.OCO_OPENAI_BASE_PATH;
let apiKey = config?.OCO_AZURE_API_KEY;
let apiEndpoint = config?.OCO_AZURE_ENDPOINT;
const [command, mode] = process.argv.slice(2);
const provider = config?.OCO_AI_PROVIDER;
if (
provider === 'azure' &&
!apiKey &&
!apiEndpoint &&
command !== 'config' &&
mode !== CONFIG_MODES.set
) {
intro('opencommit');
outro(
'OCO_AZURE_API_KEY or OCO_AZURE_ENDPOINT are not set, please run `oco config set OCO_AZURE_API_KEY=<your token> . If you are using GPT, make sure you add payment details, so API works.`'
);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
);
process.exit(1);
}
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
class Azure implements AiEngine {
private openAI!: OpenAIClient;
constructor() {
if (provider === 'azure') {
this.openAI = new OpenAIClient(apiEndpoint, new AzureKeyCredential(apiKey));
}
}
public generateCommitMessage = async (
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> => {
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content) + 4)
.reduce((a, b) => a + b, 0);
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}
const data = await this.openAI.getChatCompletions(MODEL, messages);
const message = data.choices[0].message;
if (message?.content === null) {
return undefined;
}
return message?.content;
} catch (error) {
outro(`${chalk.red('✖')} ${MODEL}`);
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const openAiError = error.response.data.error;
if (openAiError?.message) outro(openAiError.message);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
);
}
throw err;
}
};
}
export const azure = new Azure();


@@ -1,49 +0,0 @@
import axios, { AxiosError } from 'axios';
import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';
import {
getConfig
} from '../commands/config';
const config = getConfig();
export class OllamaAi implements AiEngine {
private model = "mistral"; // as default model of Ollama
setModel(model: string) {
this.model = model ?? config?.OCO_MODEL ?? 'mistral';
}
async generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> {
const model = this.model;
//console.log(messages);
//process.exit()
const url = 'http://localhost:11434/api/chat';
const p = {
model,
messages,
options: { temperature: 0, top_p: 0.1 },
stream: false
};
try {
const response = await axios.post(url, p, {
headers: {
'Content-Type': 'application/json'
}
});
const message = response.data.message;
return message?.content;
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error('local model issues. details: ' + message);
}
}
}
export const ollamaAi = new OllamaAi();
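
A usage sketch for this engine, assuming an Ollama server is running locally on the default port 11434 with the 'mistral' model pulled:

import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ollamaAi } from './ollama';

const example = async () => {
  ollamaAi.setModel('mistral'); // falls back to OCO_MODEL or 'mistral' when omitted
  return ollamaAi.generateCommitMessage([
    {
      role: ChatCompletionRequestMessageRoleEnum.User,
      content: 'diff --git a/src/server.ts b/src/server.ts ...'
    }
  ]);
};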


@@ -1,12 +0,0 @@
import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';
export class TestAi implements AiEngine {
async generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> {
return 'test commit message';
}
}
export const testAi = new TestAi();


@@ -2,26 +2,62 @@ import {
ChatCompletionRequestMessage, ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum ChatCompletionRequestMessageRoleEnum
} from 'openai'; } from 'openai';
import { api } from './api';
import { DEFAULT_TOKEN_LIMITS, getConfig } from './commands/config'; import { getConfig } from './commands/config';
import { getMainCommitPrompt } from './prompts';
import { mergeDiffs } from './utils/mergeDiffs'; import { mergeDiffs } from './utils/mergeDiffs';
import { i18n, I18nLocals } from './i18n';
import { tokenCount } from './utils/tokenCount'; import { tokenCount } from './utils/tokenCount';
import { getEngine } from './utils/engine';
const config = getConfig(); const config = getConfig();
const MAX_TOKENS_INPUT = const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
const MAX_TOKENS_OUTPUT =
config?.OCO_TOKENS_MAX_OUTPUT ||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
const generateCommitMessageChatCompletionPrompt = async ( const INIT_MESSAGES_PROMPT: Array<ChatCompletionRequestMessage> = [
diff: string, {
fullGitMojiSpec: boolean role: ChatCompletionRequestMessageRoleEnum.System,
): Promise<Array<ChatCompletionRequestMessage>> => { // prettier-ignore
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec); content: `You are to act as the author of a commit message in git. Your mission is to create clean and comprehensive commit messages in the conventional commit convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.': 'Do not preface the commit with anything.'}
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.': "Don't add any descriptions to the commit, only commit message."}
Use the present tense. Lines must not be longer than 74 characters. Use ${translation.localLanguage} to answer.`
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -10,7 +10,7 @@
import {
initWinstonLogger();
const app = express();
-const port = 7799;
+const PORT = 7799;
app.use(express.json());
@@ -34,6 +34,6 @@
app.use((_, res, next) => {
// ROUTES
app.use(PROTECTED_ROUTER_URL, protectedRouter);
-app.listen(port, () => {
- console.log(\`Server listening on port \${port}\`);
+app.listen(process.env.PORT || PORT, () => {
+ console.log(\`Server listening on port \${PORT}\`);
});`
},
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: `${config?.OCO_EMOJI ? '🐛 ' : ''}${translation.commitFix}
${config?.OCO_EMOJI ? '✨ ' : ''}${translation.commitFeat}
${config?.OCO_DESCRIPTION ? translation.commitDescription : ''}`
}
];
const generateCommitMessageChatCompletionPrompt = (
diff: string
): Array<ChatCompletionRequestMessage> => {
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT]; const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
chatContextAsCompletionRequest.push({ chatContextAsCompletionRequest.push({
@@ -35,57 +71,42 @@ const generateCommitMessageChatCompletionPrompt = async (
export enum GenerateCommitMessageErrorEnum { export enum GenerateCommitMessageErrorEnum {
tooMuchTokens = 'TOO_MUCH_TOKENS', tooMuchTokens = 'TOO_MUCH_TOKENS',
internalError = 'INTERNAL_ERROR', internalError = 'INTERNAL_ERROR',
emptyMessage = 'EMPTY_MESSAGE', emptyMessage = 'EMPTY_MESSAGE'
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
} }
const ADJUSTMENT_FACTOR = 20; interface GenerateCommitMessageError {
error: GenerateCommitMessageErrorEnum;
export const generateCommitMessageByDiff = async ( }
diff: string,
fullGitMojiSpec: boolean
): Promise<string> => {
try {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec);
const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map( const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map(
(msg) => tokenCount(msg.content) + 4 (msg) => tokenCount(msg.content) + 4
).reduce((a, b) => a + b, 0); ).reduce((a, b) => a + b, 0);
const MAX_REQUEST_TOKENS = const MAX_REQ_TOKENS = 3000 - INIT_MESSAGES_PROMPT_LENGTH;
MAX_TOKENS_INPUT -
ADJUSTMENT_FACTOR -
INIT_MESSAGES_PROMPT_LENGTH -
MAX_TOKENS_OUTPUT;
if (tokenCount(diff) >= MAX_REQUEST_TOKENS) { export const generateCommitMessageByDiff = async (
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs( diff: string
): Promise<string> => {
try {
if (tokenCount(diff) >= MAX_REQ_TOKENS) {
const commitMessagePromises = getCommitMsgsPromisesFromFileDiffs(
diff, diff,
MAX_REQUEST_TOKENS, MAX_REQ_TOKENS
fullGitMojiSpec
); );
const commitMessages = []; const commitMessages = await Promise.all(commitMessagePromises);
for (const promise of commitMessagePromises) {
commitMessages.push(await promise);
await delay(2000);
}
return commitMessages.join('\n\n'); return commitMessages.join('\n\n');
} } else {
const messages = generateCommitMessageChatCompletionPrompt(diff);
const messages = await generateCommitMessageChatCompletionPrompt( const commitMessage = await api.generateCommitMessage(messages);
diff,
fullGitMojiSpec
);
const engine = getEngine();
const commitMessage = await engine.generateCommitMessage(messages);
if (!commitMessage) if (!commitMessage)
throw new Error(GenerateCommitMessageErrorEnum.emptyMessage); throw new Error(GenerateCommitMessageErrorEnum.emptyMessage);
return commitMessage; return commitMessage;
}
} catch (error) { } catch (error) {
throw error; throw error;
} }
@@ -94,8 +115,7 @@ export const generateCommitMessageByDiff = async (
function getMessagesPromisesByChangesInFile( function getMessagesPromisesByChangesInFile(
fileDiff: string, fileDiff: string,
separator: string, separator: string,
maxChangeLength: number, maxChangeLength: number
fullGitMojiSpec: boolean
) { ) {
const hunkHeaderSeparator = '@@ '; const hunkHeaderSeparator = '@@ ';
const [fileHeader, ...fileDiffByLines] = fileDiff.split(hunkHeaderSeparator); const [fileHeader, ...fileDiffByLines] = fileDiff.split(hunkHeaderSeparator);
@@ -106,74 +126,25 @@ function getMessagesPromisesByChangesInFile(
maxChangeLength maxChangeLength
); );
const lineDiffsWithHeader = []; const lineDiffsWithHeader = mergedChanges.map(
for (const change of mergedChanges) { (change) => fileHeader + change
const totalChange = fileHeader + change;
if (tokenCount(totalChange) > maxChangeLength) {
// If the totalChange is too large, split it into smaller pieces
const splitChanges = splitDiff(totalChange, maxChangeLength);
lineDiffsWithHeader.push(...splitChanges);
} else {
lineDiffsWithHeader.push(totalChange);
}
}
const engine = getEngine();
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
async (lineDiff) => {
const messages = await generateCommitMessageChatCompletionPrompt(
separator + lineDiff,
fullGitMojiSpec
); );
return engine.generateCommitMessage(messages); const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map((lineDiff) => {
} const messages = generateCommitMessageChatCompletionPrompt(
separator + lineDiff
); );
return api.generateCommitMessage(messages);
});
return commitMsgsFromFileLineDiffs; return commitMsgsFromFileLineDiffs;
} }
function splitDiff(diff: string, maxChangeLength: number) { export function getCommitMsgsPromisesFromFileDiffs(
const lines = diff.split('\n');
const splitDiffs = [];
let currentDiff = '';
if (maxChangeLength <= 0) {
throw new Error(GenerateCommitMessageErrorEnum.outputTokensTooHigh);
}
for (let line of lines) {
// If a single line exceeds maxChangeLength, split it into multiple lines
while (tokenCount(line) > maxChangeLength) {
const subLine = line.substring(0, maxChangeLength);
line = line.substring(maxChangeLength);
splitDiffs.push(subLine);
}
// Check the tokenCount of the currentDiff and the line separately
if (tokenCount(currentDiff) + tokenCount('\n' + line) > maxChangeLength) {
// If adding the next line would exceed the maxChangeLength, start a new diff
splitDiffs.push(currentDiff);
currentDiff = line;
} else {
// Otherwise, add the line to the current diff
currentDiff += '\n' + line;
}
}
// Add the last diff
if (currentDiff) {
splitDiffs.push(currentDiff);
}
return splitDiffs;
}
export const getCommitMsgsPromisesFromFileDiffs = async (
diff: string, diff: string,
maxDiffLength: number, maxDiffLength: number
fullGitMojiSpec: boolean ) {
) => {
const separator = 'diff --git '; const separator = 'diff --git ';
const diffByFiles = diff.split(separator).slice(1); const diffByFiles = diff.split(separator).slice(1);
@@ -189,25 +160,17 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
const messagesPromises = getMessagesPromisesByChangesInFile( const messagesPromises = getMessagesPromisesByChangesInFile(
fileDiff, fileDiff,
separator, separator,
maxDiffLength, maxDiffLength
fullGitMojiSpec
); );
commitMessagePromises.push(...messagesPromises); commitMessagePromises.push(...messagesPromises);
} else { } else {
const messages = await generateCommitMessageChatCompletionPrompt( const messages = generateCommitMessageChatCompletionPrompt(
separator + fileDiff, separator + fileDiff
fullGitMojiSpec
); );
const engine = getEngine(); commitMessagePromises.push(api.generateCommitMessage(messages));
commitMessagePromises.push(engine.generateCommitMessage(messages));
} }
} }
return commitMessagePromises; return commitMessagePromises;
};
function delay(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
} }


@@ -1,14 +1,12 @@
import { unlinkSync, writeFileSync } from 'fs';
import core from '@actions/core'; import core from '@actions/core';
import exec from '@actions/exec';
import github from '@actions/github'; import github from '@actions/github';
import exec from '@actions/exec';
import { intro, outro } from '@clack/prompts'; import { intro, outro } from '@clack/prompts';
import { PushEvent } from '@octokit/webhooks-types'; import { PushEvent } from '@octokit/webhooks-types';
import { generateCommitMessageByDiff } from './generateCommitMessageFromGitDiff'; import { generateCommitMessageByDiff } from './generateCommitMessageFromGitDiff';
import { randomIntFromInterval } from './utils/randomIntFromInterval';
import { sleep } from './utils/sleep'; import { sleep } from './utils/sleep';
import { randomIntFromInterval } from './utils/randomIntFromInterval';
import { unlinkSync, writeFileSync } from 'fs';
// This should be a token with access to your repository scoped in as a secret. // This should be a token with access to your repository scoped in as a secret.
// The YML workflow will need to set GITHUB_TOKEN with the GitHub Secret Token // The YML workflow will need to set GITHUB_TOKEN with the GitHub Secret Token
@@ -135,16 +133,6 @@ async function improveCommitMessages(
improvedMessagesWithSHAs improvedMessagesWithSHAs
); );
// Check if there are actually any changes in the commit messages
const messagesChanged = improvedMessagesWithSHAs.some(
({ sha, msg }, index) => msg !== commitsToImprove[index].message
);
if (!messagesChanged) {
console.log('No changes in commit messages detected, skipping rebase');
return;
}
const createCommitMessageFile = (message: string, index: number) => const createCommitMessageFile = (message: string, index: number) =>
writeFileSync(`./commit-${index}.txt`, message); writeFileSync(`./commit-${index}.txt`, message);
improvedMessagesWithSHAs.forEach(({ msg }, i) => improvedMessagesWithSHAs.forEach(({ msg }, i) =>


@@ -1,22 +1,22 @@
import cs from '../i18n/cs.json'; import en from '../i18n/en.json' assert { type: 'json' };
import de from '../i18n/de.json'; import cs from '../i18n/cs.json' assert { type: 'json' };
import en from '../i18n/en.json'; import de from '../i18n/de.json' assert { type: 'json' };
import es_ES from '../i18n/es_ES.json'; import fr from '../i18n/fr.json' assert { type: 'json' };
import fr from '../i18n/fr.json'; import it from '../i18n/it.json' assert { type: 'json' };
import id_ID from '../i18n/id_ID.json'; import ko from '../i18n/ko.json' assert { type: 'json' };
import it from '../i18n/it.json'; import zh_CN from '../i18n/zh_CN.json' assert { type: 'json' };
import ja from '../i18n/ja.json'; import zh_TW from '../i18n/zh_TW.json' assert { type: 'json' };
import ko from '../i18n/ko.json'; import ja from '../i18n/ja.json' assert { type: 'json' };
import nl from '../i18n/nl.json'; import pt_br from '../i18n/pt_br.json' assert { type: 'json' };
import pl from '../i18n/pl.json'; import vi_VN from '../i18n/vi_VN.json' assert { type: 'json' };
import pt_br from '../i18n/pt_br.json'; import es_ES from '../i18n/es_ES.json' assert { type: 'json' };
import ru from '../i18n/ru.json'; import sv from '../i18n/sv.json' assert { type: 'json' };
import sv from '../i18n/sv.json'; import nl from '../i18n/nl.json' assert { type: 'json' };
import th from '../i18n/th.json'; import ru from '../i18n/ru.json' assert { type: 'json' };
import tr from '../i18n/tr.json'; import id_ID from '../i18n/id_ID.json' assert { type: 'json' };
import vi_VN from '../i18n/vi_VN.json'; import pl from '../i18n/pl.json' assert { type: 'json' };
import zh_CN from '../i18n/zh_CN.json'; import tr from '../i18n/tr.json' assert { type: 'json' };
import zh_TW from '../i18n/zh_TW.json'; import th from '../i18n/th.json' assert { type: 'json' };
export enum I18nLocals { export enum I18nLocals {
'en' = 'en', 'en' = 'en',
@@ -36,7 +36,7 @@ export enum I18nLocals {
'id_ID' = 'id_ID', 'id_ID' = 'id_ID',
'pl' = 'pl', 'pl' = 'pl',
'tr' = 'tr', 'tr' = 'tr',
'th' = 'th' 'th' = 'th',
} }
export const i18n = { export const i18n = {


@@ -1,84 +0,0 @@
import { spinner } from '@clack/prompts';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
import { computeHash } from './crypto';
import { commitlintPrompts, inferPromptsFromCommitlintConfig } from './prompts';
import { getCommitLintPWDConfig } from './pwd-commitlint';
import { CommitlintLLMConfig } from './types';
import * as utils from './utils';
import { getEngine } from '../../utils/engine';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
export const configureCommitlintIntegration = async (force = false) => {
const spin = spinner();
spin.start('Loading @commitlint configuration');
const fileExists = await utils.commitlintLLMConfigExists();
let commitLintConfig = await getCommitLintPWDConfig();
// debug complete @commitlint configuration
// await fs.writeFile(
// `${OPENCOMMIT_COMMITLINT_CONFIG}-commitlint-debug`,
// JSON.stringify(commitLintConfig, null, 2)
// );
const hash = await computeHash(JSON.stringify(commitLintConfig));
spin.stop(`Read @commitlint configuration (hash: ${hash})`);
if (fileExists) {
// Check if we need to update the prompts.
const { hash: existingHash } = await utils.getCommitlintLLMConfig();
if (hash === existingHash && !force) {
spin.stop(
'Hashes are the same, no need to update the config. Run "force" command to bypass.'
);
return;
}
}
spin.start('Generating consistency with given @commitlint rules');
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig);
const consistencyPrompts =
commitlintPrompts.GEN_COMMITLINT_CONSISTENCY_PROMPT(prompts);
// debug prompt which will generate a consistency
// await fs.writeFile(
// `${COMMITLINT_LLM_CONFIG}-debug`,
// consistencyPrompts.map((p) => p.content)
// );
const engine = getEngine();
let consistency =
(await engine.generateCommitMessage(consistencyPrompts)) || '{}';
// Clean up the consistency answer. Sometimes 'gpt-3.5-turbo' sends the rules back.
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));
// Sometimes the consistency answer is preceded by explanatory text like "Here is your JSON:"
consistency = utils.getJSONBlock(consistency);
// ... and what remains might contain an extra set of "\n"
consistency = utils.removeDoubleNewlines(consistency);
const commitlintLLMConfig: CommitlintLLMConfig = {
hash,
prompts,
consistency: {
[translation.localLanguage]: {
...JSON.parse(consistency as string)
}
}
};
await utils.writeCommitlintLLMConfig(commitlintLLMConfig);
spin.stop(`Done - please review contents of ${COMMITLINT_LLM_CONFIG_PATH}`);
};


@@ -1 +0,0 @@
export const COMMITLINT_LLM_CONFIG_PATH = `${process.env.PWD}/.opencommit-commitlint`;


@@ -1,15 +0,0 @@
import crypto from 'crypto';
export const computeHash = async (
content: string,
algorithm: string = 'sha256'
): Promise<string> => {
try {
const hash = crypto.createHash(algorithm);
hash.update(content);
return hash.digest('hex');
} catch (error) {
console.error('Error while computing hash:', error);
throw error;
}
};
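
A short usage sketch (run inside an async context; the input string is illustrative):

import { computeHash } from './crypto';

// sha256 by default; any algorithm supported by crypto.createHash (e.g. 'sha512') can be passed.
const hash = await computeHash(JSON.stringify({ rules: {} }));
console.log(hash); // 64-character hex digest for sha256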


@@ -1,284 +0,0 @@
import chalk from 'chalk';
import {
ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { outro } from '@clack/prompts';
import {
PromptConfig,
QualifiedConfig,
RuleConfigSeverity,
RuleConfigTuple
} from '@commitlint/types';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { IDENTITY, INIT_DIFF_PROMPT } from '../../prompts';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
type DeepPartial<T> = {
[P in keyof T]?: {
[K in keyof T[P]]?: T[P][K];
};
};
type PromptFunction = (
applicable: string,
value: any,
prompt: DeepPartial<PromptConfig>
) => string;
type PromptResolverFunction = (
key: string,
applicable: string,
value: any,
prompt?: DeepPartial<PromptConfig>
) => string;
/**
 * Extracts more context for each type-enum.
* IDEA: replicate the concept for scopes and refactor to a generic feature.
*/
const getTypeRuleExtraDescription = (
type: string,
prompt?: DeepPartial<PromptConfig>
) => prompt?.questions?.type?.enum?.[type]?.description;
/*
  IDEA: Compress the LLM-readable prompt for each section of the commit message: one line for the header, one line for the scope, etc.
- The type must be in lowercase and should be one of the following values: featuring, fixing, documenting, styling, refactoring, testing, chores, perf, build, ci, revert.
- The scope should not be empty and provide context for the change (e.g., module or file changed).
- The subject should not be empty, should not end with a period, and should provide a concise description of the change. It should not be in sentence-case, start-case, pascal-case, or upper-case.
*/
const llmReadableRules: {
[ruleName: string]: PromptResolverFunction;
} = {
blankline: (key, applicable) =>
`There should ${applicable} be a blank line at the beginning of the ${key}.`,
caseRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be in ${
Array.isArray(value)
? `one of the following case:
- ${value.join('\n - ')}.`
: `${value} case.`
}`,
emptyRule: (key, applicable) => `The ${key} should ${applicable} be empty.`,
enumRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be one of the following values:
- ${Array.isArray(value) ? value.join('\n - ') : value}.`,
enumTypeRule: (key, applicable, value: string | Array<string>, prompt) =>
`The ${key} should ${applicable} be one of the following values:
- ${
Array.isArray(value)
? value
.map((v) => {
const description = getTypeRuleExtraDescription(v, prompt);
if (description) {
return `${v} (${description})`;
} else return v;
})
.join('\n - ')
: value
}.`,
fullStopRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} end with '${value}'.`,
maxLengthRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} have ${value} characters or less.`,
minLengthRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} have ${value} characters or more.`
};
/**
* TODO: Validate rules to every rule in the @commitlint configuration.
* IDEA: Plugins can extend the list of rule. Provide user with a way to infer or extend when "No prompt handler for rule".
*/
const rulesPrompts: {
[ruleName: string]: PromptFunction;
} = {
'body-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('body', applicable, value),
'body-empty': (applicable: string) =>
llmReadableRules.emptyRule('body', applicable, undefined),
'body-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('body', applicable, value),
'body-leading-blank': (applicable: string) =>
llmReadableRules.blankline('body', applicable, undefined),
'body-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('body', applicable, value),
'body-max-line-length': (applicable: string, value: string) =>
`Each line of the body should ${applicable} have ${value} characters or less.`,
'body-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('body', applicable, value),
'footer-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('footer', applicable, value),
'footer-empty': (applicable: string) =>
llmReadableRules.emptyRule('footer', applicable, undefined),
'footer-leading-blank': (applicable: string) =>
llmReadableRules.blankline('footer', applicable, undefined),
'footer-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('footer', applicable, value),
'footer-max-line-length': (applicable: string, value: string) =>
`Each line of the footer should ${applicable} have ${value} characters or less.`,
'footer-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('footer', applicable, value),
'header-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('header', applicable, value),
'header-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('header', applicable, value),
'header-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('header', applicable, value),
'header-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('header', applicable, value),
'references-empty': (applicable: string) =>
llmReadableRules.emptyRule('references section', applicable, undefined),
'scope-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('scope', applicable, value),
'scope-empty': (applicable: string) =>
llmReadableRules.emptyRule('scope', applicable, undefined),
'scope-enum': (applicable: string, value: string | Array<string>) =>
llmReadableRules.enumRule('scope', applicable, value),
'scope-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('scope', applicable, value),
'scope-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('scope', applicable, value),
'signed-off-by': (applicable: string, value: string) =>
`The commit message should ${applicable} have a "Signed-off-by" line with the value "${value}".`,
'subject-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('subject', applicable, value),
'subject-empty': (applicable: string) =>
llmReadableRules.emptyRule('subject', applicable, undefined),
'subject-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('subject', applicable, value),
'subject-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('subject', applicable, value),
'subject-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('subject', applicable, value),
'type-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('type', applicable, value),
'type-empty': (applicable: string) =>
llmReadableRules.emptyRule('type', applicable, undefined),
'type-enum': (applicable: string, value: string | Array<string>, prompt) =>
llmReadableRules.enumTypeRule('type', applicable, value, prompt),
'type-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('type', applicable, value),
'type-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('type', applicable, value)
};
const getPrompt = (
ruleName: string,
ruleConfig: RuleConfigTuple<unknown>,
prompt: DeepPartial<PromptConfig>
) => {
const [severity, applicable, value] = ruleConfig;
// Should we exclude "Disabled" properties?
// Is this used to disable an underlying rule when extending presets?
if (severity === RuleConfigSeverity.Disabled) return null;
const promptFn = rulesPrompts[ruleName];
if (promptFn) {
return promptFn(applicable, value, prompt);
}
// Plugins may add their custom rules.
// We might want to call OpenAI to build this rule's llm-readable prompt.
outro(`${chalk.red('✖')} No prompt handler for rule "${ruleName}".`);
return `Please manually set the prompt for rule "${ruleName}".`;
};
export const inferPromptsFromCommitlintConfig = (
config: QualifiedConfig
): string[] => {
const { rules, prompt } = config;
if (!rules) return [];
return Object.keys(rules)
.map((ruleName) =>
getPrompt(ruleName, rules[ruleName] as RuleConfigTuple<unknown>, prompt)
)
.filter((prompt) => prompt !== null) as string[];
};
/**
* Breaking down commit message structure for conventional commit, and mapping bits with
* ubiquitous language from @commitlint.
 * While gpt-4 does this on its own, gpt-3.5 can't yet map this by itself.
*/
const STRUCTURE_OF_COMMIT = `
- Header of commit is composed of type, scope, subject: <type-of-commit>(<scope-of-commit>): <subject-of-commit>
- Description of commit is composed of body and footer (optional): <body-of-commit>\n<footer(s)-of-commit>`;
// Prompt to generate LLM-readable rules based on @commitlint rules.
const GEN_COMMITLINT_CONSISTENCY_PROMPT = (
prompts: string[]
): ChatCompletionRequestMessage[] => [
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
// prettier-ignore
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
Here are the specific requirements and conventions that should be strictly followed:
Commit Message Conventions:
- The commit message consists of three parts: Header, Body, and Footer.
- Header:
- Format: \`<type>(<scope>): <subject>\`
- ${prompts.join('\n- ')}
JSON Output Format:
- The JSON output should contain the commit messages for a bug fix and a new feature in the following format:
\`\`\`json
{
"localLanguage": "${translation.localLanguage}",
"commitFix": "<Header of commit for bug fix>",
"commitFeat": "<Header of commit for feature>",
"commitDescription": "<Description of commit for both the bug fix and the feature>"
}
\`\`\`
- The "commitDescription" should not include the commit messages header, only the description.
- Description should not be more than 74 characters.
Additional Details:
- Changing the variable 'port' to uppercase 'PORT' is considered a bug fix.
- Allowing the server to listen on a port specified through the environment variable is considered a new feature.
Example Git Diff is to follow:`
},
INIT_DIFF_PROMPT
];
/**
* Prompt to have LLM generate a message using @commitlint rules.
*
* @param language
* @param prompts
* @returns
*/
const INIT_MAIN_PROMPT = (
language: string,
prompts: string[]
): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.System,
// prettier-ignore
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.' : 'Do not preface the commit with anything.'}
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.' : "Don't add any descriptions to the commit, only commit message."}
Use the present tense. Use ${language} to answer.
${ config?.OCO_ONE_LINE_COMMIT ? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.' : ""}
You will strictly follow the following conventions to generate the content of the commit message:
- ${prompts.join('\n- ')}
The conventions refers to the following structure of commit message:
${STRUCTURE_OF_COMMIT}
`
});
export const commitlintPrompts = {
INIT_MAIN_PROMPT,
GEN_COMMITLINT_CONSISTENCY_PROMPT
};
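
To illustrate the inferPromptsFromCommitlintConfig helper defined earlier in this file, a sketch with a hand-written rules object (a real QualifiedConfig comes from @commitlint/load; the cast exists only to keep the example short):

import { RuleConfigSeverity } from '@commitlint/types';
import { inferPromptsFromCommitlintConfig } from './prompts';

const prompts = inferPromptsFromCommitlintConfig({
  rules: {
    'header-max-length': [RuleConfigSeverity.Error, 'always', 72],
    'subject-empty': [RuleConfigSeverity.Error, 'never']
  },
  prompt: {}
} as any);
// -> [
//      'The header should always have 72 characters or less.',
//      'The subject should never be empty.'
//    ]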


@@ -1,25 +0,0 @@
import path from 'path';
const nodeModulesPath = path.join(
process.env.PWD || process.cwd(),
'node_modules',
'@commitlint',
'load'
);
/**
 * Loads the `@commitlint` configuration from the current working
 * directory (`process.env.PWD`) by requiring the `load` module of the `@commitlint` package.
*
* @returns
*/
export const getCommitLintPWDConfig = async () => {
const load = require(nodeModulesPath).default;
if (load && typeof load === 'function') {
return await load();
}
// @commitlint/load is not a function
return null;
};
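
A usage sketch (run inside an async context); it assumes @commitlint/load is installed in the project being committed:

import { getCommitLintPWDConfig } from './pwd-commitlint';

const commitlintConfig = await getCommitLintPWDConfig();
if (commitlintConfig) {
  // The resolved config exposes the project's rules, prompt settings, plugins, etc.
  console.log(Object.keys(commitlintConfig.rules ?? {}));
}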


@@ -1,11 +0,0 @@
import { i18n } from '../../i18n';
export type ConsistencyPrompt = (typeof i18n)[keyof typeof i18n];
export type CommitlintLLMConfig = {
hash: string;
prompts: string[];
consistency: {
[key: string]: ConsistencyPrompt;
};
};
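
A hypothetical value of this shape, to show roughly what the generated '.opencommit-commitlint' file contains (all values are made up; a real consistency entry is a full translation object, hence the cast):

import { CommitlintLLMConfig, ConsistencyPrompt } from './types';

const example: CommitlintLLMConfig = {
  hash: 'e3b0c44298fc1c14', // truncated hash of the @commitlint config
  prompts: [
    'The subject should never be empty.',
    'The header should always have 72 characters or less.'
  ],
  consistency: {
    english: {
      localLanguage: 'english',
      commitFix: 'fix(server): use PORT environment variable',
      commitFeat: 'feat(server): allow configuring the port via env',
      commitDescription: 'Reads the port from the environment to ease deployment.'
    } as ConsistencyPrompt
  }
};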


@@ -1,57 +0,0 @@
import fs from 'fs/promises';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
import { CommitlintLLMConfig } from './types';
/**
* Removes the "\n" only if occurring twice
*/
export const removeDoubleNewlines = (input: string): string => {
const pattern = /\\n\\n/g;
if (pattern.test(input)) {
const newInput = input.replace(pattern, '');
return removeDoubleNewlines(newInput);
}
return input;
};
export const getJSONBlock = (input: string): string => {
const jsonIndex = input.search('```json');
if (jsonIndex > -1) {
input = input.slice(jsonIndex + 8);
const endJsonIndex = input.search('```');
input = input.slice(0, endJsonIndex);
}
return input;
};
export const commitlintLLMConfigExists = async (): Promise<boolean> => {
let exists;
try {
await fs.access(COMMITLINT_LLM_CONFIG_PATH);
exists = true;
} catch (e) {
exists = false;
}
return exists;
};
export const writeCommitlintLLMConfig = async (
commitlintLLMConfig: CommitlintLLMConfig
): Promise<void> => {
await fs.writeFile(
COMMITLINT_LLM_CONFIG_PATH,
JSON.stringify(commitlintLLMConfig, null, 2)
);
};
export const getCommitlintLLMConfig =
async (): Promise<CommitlintLLMConfig> => {
const content = await fs.readFile(COMMITLINT_LLM_CONFIG_PATH);
const commitLintLLMConfig = JSON.parse(
content.toString()
) as CommitlintLLMConfig;
return commitLintLLMConfig;
};
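
A small sketch of how the two cleanup helpers above compose, using a made-up model answer:

import { getJSONBlock, removeDoubleNewlines } from './utils';

// Made-up LLM answer: explanatory text, a fenced JSON block, and stray literal "\n\n" noise.
const raw =
  'Here is your JSON:\n' +
  '```json\n' +
  '{ "commitFix": "fix(server): use PORT env var" }\\n\\n\n' +
  '```';

const cleaned = removeDoubleNewlines(getJSONBlock(raw));
const parsed = JSON.parse(cleaned); // { commitFix: 'fix(server): use PORT env var' }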


@@ -1,207 +0,0 @@
import {
ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { note } from '@clack/prompts';
import { getConfig } from './commands/config';
import { i18n, I18nLocals } from './i18n';
import { configureCommitlintIntegration } from './modules/commitlint/config';
import { commitlintPrompts } from './modules/commitlint/prompts';
import { ConsistencyPrompt } from './modules/commitlint/types';
import * as utils from './modules/commitlint/utils';
import { removeConventionalCommitWord } from './utils/removeConventionalCommitWord';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
export const IDENTITY =
'You are to act as the author of a commit message in git.';
const INIT_MAIN_PROMPT = (
language: string,
fullGitMojiSpec: boolean
): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.System,
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages as per the ${
fullGitMojiSpec ? 'GitMoji specification' : 'conventional commit convention'
} and explain WHAT were the changes and mainly WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you are to convert it into a commit message.
${
config?.OCO_EMOJI
? 'Use GitMoji convention to preface the commit. Here are some help to choose the right emoji (emoji, description): ' +
'🐛, Fix a bug; ' +
'✨, Introduce new features; ' +
'📝, Add or update documentation; ' +
'🚀, Deploy stuff; ' +
'✅, Add, update, or pass tests; ' +
'♻️, Refactor code; ' +
'⬆️, Upgrade dependencies; ' +
'🔧, Add or update configuration files; ' +
'🌐, Internationalization and localization; ' +
'💡, Add or update comments in source code; ' +
`${
fullGitMojiSpec
? '🎨, Improve structure / format of the code; ' +
'⚡️, Improve performance; ' +
'🔥, Remove code or files; ' +
'🚑️, Critical hotfix; ' +
'💄, Add or update the UI and style files; ' +
'🎉, Begin a project; ' +
'🔒️, Fix security issues; ' +
'🔐, Add or update secrets; ' +
'🔖, Release / Version tags; ' +
'🚨, Fix compiler / linter warnings; ' +
'🚧, Work in progress; ' +
'💚, Fix CI Build; ' +
'⬇️, Downgrade dependencies; ' +
'📌, Pin dependencies to specific versions; ' +
'👷, Add or update CI build system; ' +
'📈, Add or update analytics or track code; ' +
'➕, Add a dependency; ' +
'➖, Remove a dependency; ' +
'🔨, Add or update development scripts; ' +
'✏️, Fix typos; ' +
'💩, Write bad code that needs to be improved; ' +
'⏪️, Revert changes; ' +
'🔀, Merge branches; ' +
'📦️, Add or update compiled files or packages; ' +
'👽️, Update code due to external API changes; ' +
'🚚, Move or rename resources (e.g.: files, paths, routes); ' +
'📄, Add or update license; ' +
'💥, Introduce breaking changes; ' +
'🍱, Add or update assets; ' +
'♿️, Improve accessibility; ' +
'🍻, Write code drunkenly; ' +
'💬, Add or update text and literals; ' +
'🗃️, Perform database related changes; ' +
'🔊, Add or update logs; ' +
'🔇, Remove logs; ' +
'👥, Add or update contributor(s); ' +
'🚸, Improve user experience / usability; ' +
'🏗️, Make architectural changes; ' +
'📱, Work on responsive design; ' +
'🤡, Mock things; ' +
'🥚, Add or update an easter egg; ' +
'🙈, Add or update a .gitignore file; ' +
'📸, Add or update snapshots; ' +
'⚗️, Perform experiments; ' +
'🔍️, Improve SEO; ' +
'🏷️, Add or update types; ' +
'🌱, Add or update seed files; ' +
'🚩, Add, update, or remove feature flags; ' +
'🥅, Catch errors; ' +
'💫, Add or update animations and transitions; ' +
'🗑️, Deprecate code that needs to be cleaned up; ' +
'🛂, Work on code related to authorization, roles and permissions; ' +
'🩹, Simple fix for a non-critical issue; ' +
'🧐, Data exploration/inspection; ' +
'⚰️, Remove dead code; ' +
'🧪, Add a failing test; ' +
'👔, Add or update business logic; ' +
'🩺, Add or update healthcheck; ' +
'🧱, Infrastructure related changes; ' +
'🧑‍💻, Improve developer experience; ' +
'💸, Add sponsorships or money related infrastructure; ' +
'🧵, Add or update code related to multithreading or concurrency; ' +
'🦺, Add or update code related to validation.'
: ''
}`
: 'Do not preface the commit with anything. Conventional commit keywords:' +
'fix, feat, build, chore, ci, docs, style, refactor, perf, test.'
}
${
config?.OCO_DESCRIPTION
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
: "Don't add any descriptions to the commit, only commit message."
}
${
config?.OCO_ONE_LINE_COMMIT
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
: ''
}
Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`
});
export const INIT_DIFF_PROMPT: ChatCompletionRequestMessage = {
role: ChatCompletionRequestMessageRoleEnum.User,
content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -10,7 +10,7 @@
import {
initWinstonLogger();
const app = express();
-const port = 7799;
+const PORT = 7799;
app.use(express.json());
@@ -34,6 +34,6 @@
app.use((_, res, next) => {
// ROUTES
app.use(PROTECTED_ROUTER_URL, protectedRouter);
-app.listen(port, () => {
- console.log(\`Server listening on port \${port}\`);
+app.listen(process.env.PORT || PORT, () => {
+ console.log(\`Server listening on port \${PORT}\`);
});`
};
const INIT_CONSISTENCY_PROMPT = (
translation: ConsistencyPrompt
): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: `${
config?.OCO_EMOJI
? `🐛 ${removeConventionalCommitWord(translation.commitFix)}`
: translation.commitFix
}
${
config?.OCO_EMOJI
? `✨ ${removeConventionalCommitWord(translation.commitFeat)}`
: translation.commitFeat
}
${config?.OCO_DESCRIPTION ? translation.commitDescription : ''}`
});
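For illustration, a hedged sketch of calling INIT_CONSISTENCY_PROMPT with a hand-written translation object; the field values below are hypothetical placeholders, only the field names come from the code above:
const sampleTranslation = {
localLanguage: 'english', // hypothetical value
commitFix: 'fix(server.ts): use uppercase PORT variable', // hypothetical value
commitFeat: 'feat(server.ts): read port from process.env.PORT', // hypothetical value
commitDescription: 'The port is now configurable via an environment variable.' // hypothetical value
};
const consistencyMessage = INIT_CONSISTENCY_PROMPT(sampleTranslation as ConsistencyPrompt);
// With OCO_EMOJI enabled, commitFix is prefixed with 🐛 (and commitFeat with ✨), and both
// have their conventional fix(...)/feat(...) keywords stripped by removeConventionalCommitWord.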
export const getMainCommitPrompt = async (
fullGitMojiSpec: boolean
): Promise<ChatCompletionRequestMessage[]> => {
switch (config?.OCO_PROMPT_MODULE) {
case '@commitlint':
if (!(await utils.commitlintLLMConfigExists())) {
note(
`OCO_PROMPT_MODULE is @commitlint but you haven't generated consistency for this project yet.`
);
await configureCommitlintIntegration();
}
// Replace example prompt with a prompt that's generated by OpenAI for the commitlint config.
const commitLintConfig = await utils.getCommitlintLLMConfig();
return [
commitlintPrompts.INIT_MAIN_PROMPT(
translation.localLanguage,
commitLintConfig.prompts
),
INIT_DIFF_PROMPT,
INIT_CONSISTENCY_PROMPT(
commitLintConfig.consistency[
translation.localLanguage
] as ConsistencyPrompt
)
];
default:
// conventional-commit
return [
INIT_MAIN_PROMPT(translation.localLanguage, fullGitMojiSpec),
INIT_DIFF_PROMPT,
INIT_CONSISTENCY_PROMPT(translation)
];
}
};
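A hedged sketch of how the returned messages might be consumed; appending the staged diff as the user message is an assumption here, since the caller is not part of this listing:
const promptMessages = await getMainCommitPrompt(false);
const stagedDiff = 'diff --git a/src/server.ts b/src/server.ts ...'; // hypothetical placeholder
const chatMessages = [
...promptMessages,
{ role: ChatCompletionRequestMessageRoleEnum.User, content: stagedDiff }
];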

View File

@@ -1,10 +1,7 @@
+import { getOpenCommitLatestVersion } from '../api';
+import currentPackage from '../../package.json' assert { type: 'json' };
import chalk from 'chalk';
-import { outro } from '@clack/prompts';
-import currentPackage from '../../package.json';
-import { getOpenCommitLatestVersion } from '../version';
export const checkIsLatestVersion = async () => {
const latestVersion = await getOpenCommitLatestVersion();
@@ -12,7 +9,7 @@ export const checkIsLatestVersion = async () => {
const currentVersion = currentPackage.version;
if (currentVersion !== latestVersion) {
-outro(
+console.warn(
chalk.yellow(
`
You are not using the latest stable version of OpenCommit with new features and bug fixes.

View File

@@ -1,26 +0,0 @@
import { AiEngine } from '../engine/Engine';
import { api } from '../engine/openAi';
import { getConfig } from '../commands/config';
import { ollamaAi } from '../engine/ollama';
import { azure } from '../engine/azure';
import { anthropicAi } from '../engine/anthropic'
import { testAi } from '../engine/testAi';
export function getEngine(): AiEngine {
const config = getConfig();
const provider = config?.OCO_AI_PROVIDER;
if (provider?.startsWith('ollama')) {
const model = provider.split('/')[1];
if (model) ollamaAi.setModel(model);
return ollamaAi;
} else if (config?.OCO_AI_PROVIDER == 'anthropic') {
return anthropicAi;
} else if (config?.OCO_AI_PROVIDER == 'test') {
return testAi;
} else if (config?.OCO_AI_PROVIDER == 'azure') {
return azure;
}
// open ai gpt by default
return api;
}
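A small usage sketch; the AiEngine interface is not shown in this listing, so the generateCommitMessage call below is an assumption and is left commented out:
const engine = getEngine();
// const commitMessage = await engine.generateCommitMessage(chatMessages); // assumed method name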

View File

@@ -1,9 +1,8 @@
import { execa } from 'execa';
+import { outro, spinner } from '@clack/prompts';
import { readFileSync } from 'fs';
import ignore, { Ignore } from 'ignore';
-import { outro, spinner } from '@clack/prompts';
export const assertGitRepo = async () => {
try {
await execa('git', ['rev-parse']);
@@ -27,10 +26,12 @@ export const getOpenCommitIgnore = (): Ignore => {
};
export const getCoreHooksPath = async(): Promise<string> => {
-const { stdout } = await execa('git', ['config', 'core.hooksPath']);
+const { stdout } = await execa('git', [
+'config',
+'core.hooksPath']);
return stdout;
-};
+}
export const getStagedFiles = async (): Promise<string[]> => {
const { stdout: gitDir } = await execa('git', [
@@ -75,30 +76,19 @@ export const getChangedFiles = async (): Promise<string[]> => {
export const gitAdd = async ({ files }: { files: string[] }) => {
const gitAddSpinner = spinner();
gitAddSpinner.start('Adding files to commit');
await execa('git', ['add', ...files]);
gitAddSpinner.stop('Done');
};
export const getDiff = async ({ files }: { files: string[] }) => {
const lockFiles = files.filter(
-(file) =>
-file.includes('.lock') ||
-file.includes('-lock.') ||
-file.includes('.svg') ||
-file.includes('.png') ||
-file.includes('.jpg') ||
-file.includes('.jpeg') ||
-file.includes('.webp') ||
-file.includes('.gif')
+(file) => file.includes('.lock') || file.includes('-lock.')
);
if (lockFiles.length) {
outro(
-`Some files are excluded by default from 'git diff'. No commit messages are generated for this files:\n${lockFiles.join(
+`Some files are '.lock' files which are excluded by default from 'git diff'. No commit messages are generated for this files:\n${lockFiles.join(
'\n'
)}`
);
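To make the two filter variants concrete, a quick sketch with hypothetical staged file names:
const staged = ['package-lock.json', 'yarn.lock', 'logo.png', 'src/cli.ts']; // hypothetical paths
// The short predicate excludes only lock files from the diff:
staged.filter((file) => file.includes('.lock') || file.includes('-lock.'));
// => ['package-lock.json', 'yarn.lock']
// The longer predicate also matches common binary assets (.svg, .png, .jpg, .jpeg, .webp, .gif),
// so 'logo.png' would be excluded from the generated diff as well.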

View File

@@ -1,5 +1,4 @@
import { tokenCount } from './tokenCount';
export function mergeDiffs(arr: string[], maxStringLength: number): string[] {
const mergedArr: string[] = [];
let currentItem: string = arr[0];

View File

@@ -1,3 +0,0 @@
export function removeConventionalCommitWord(message: string): string {
return message.replace(/^(fix|feat)\((.+?)\):/, '($2):');
}
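The regex only rewrites prefixes that carry a scope, for example:
removeConventionalCommitWord('feat(api): add endpoint'); // => '(api): add endpoint'
removeConventionalCommitWord('fix(server.ts): handle missing port'); // => '(server.ts): handle missing port'
removeConventionalCommitWord('fix: typo'); // unchanged, there is no "(scope)" group to match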

View File

@@ -1,5 +1,5 @@
-import cl100k_base from '@dqbd/tiktoken/encoders/cl100k_base.json';
import { Tiktoken } from '@dqbd/tiktoken/lite';
+import cl100k_base from '@dqbd/tiktoken/encoders/cl100k_base.json' assert { type: 'json' };
export function tokenCount(content: string): number {
const encoding = new Tiktoken(

View File

@@ -1,14 +0,0 @@
import { outro } from '@clack/prompts';
import { execa } from 'execa';
export const getOpenCommitLatestVersion = async (): Promise<
string | undefined
> => {
try {
const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
return stdout;
} catch (_) {
outro('Error while getting the latest version of opencommit');
return undefined;
}
};

View File

@@ -1,22 +0,0 @@
FROM ubuntu:latest
RUN apt-get update && apt-get install -y curl git
# Install Node.js v20
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
RUN apt-get install -y nodejs
# Setup git
RUN git config --global user.email "test@example.com"
RUN git config --global user.name "Test User"
WORKDIR /app
COPY package.json /app/
COPY package-lock.json /app/
RUN ls -la
RUN npm ci
COPY . /app
RUN ls -la
RUN npm run build

View File

@@ -1,13 +0,0 @@
import { resolve } from 'path'
import { render } from 'cli-testing-library'
import 'cli-testing-library/extend-expect';
import { prepareEnvironment } from './utils';
it('cli flow when there are no changes', async () => {
const { gitDir, cleanup } = await prepareEnvironment();
const { findByText } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
expect(await findByText('No changes detected')).toBeInTheConsole();
await cleanup();
});

View File

@@ -1,56 +0,0 @@
import { resolve } from 'path'
import { render } from 'cli-testing-library'
import 'cli-testing-library/extend-expect';
import { prepareEnvironment } from './utils';
it('cli flow to generate commit message for 1 new file (staged)', async () => {
const { gitDir, cleanup } = await prepareEnvironment();
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
await render('git' ,['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
expect(await queryByText('No files are staged')).not.toBeInTheConsole();
expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();
expect(await findByText('Generating the commit message')).toBeInTheConsole();
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
await cleanup();
});
it('cli flow to generate commit message for 1 changed file (not staged)', async () => {
const { gitDir, cleanup } = await prepareEnvironment();
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
await render('git' ,['add index.ts'], { cwd: gitDir });
await render('git' ,[`commit -m 'add new file'`], { cwd: gitDir });
await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });
const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
expect(await findByText('No files are staged')).toBeInTheConsole();
expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Generating the commit message')).toBeInTheConsole();
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Successfully committed')).toBeInTheConsole();
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
userEvent.keyboard('[Enter]');
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
await cleanup();
});

View File

@@ -1,31 +0,0 @@
import path from 'path'
import { mkdtemp, rm } from 'fs'
import { promisify } from 'util';
import { tmpdir } from 'os';
import { exec } from 'child_process';
const fsMakeTempDir = promisify(mkdtemp);
const fsExec = promisify(exec);
const fsRemove = promisify(rm);
/**
* Prepare the environment for the test
* Create a temporary git repository in the temp directory
*/
export const prepareEnvironment = async (): Promise<{
gitDir: string;
cleanup: () => Promise<void>;
}> => {
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
// Create a remote git repository in the temp directory. This is necessary to execute the `git push` command
await fsExec('git init --bare remote.git', { cwd: tempDir });
await fsExec('git clone remote.git test', { cwd: tempDir });
const gitDir = path.resolve(tempDir, 'test');
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
}
return {
gitDir,
cleanup,
}
}

View File

@@ -1,7 +0,0 @@
import 'cli-testing-library/extend-expect'
import { configure } from 'cli-testing-library'
/**
* Adjusted the wait time for waitFor/findByText to 2000ms, because the default 1000ms makes the test results flaky
*/
configure({ asyncUtilTimeout: 2000 })

View File

@@ -1,105 +0,0 @@
import { getConfig } from '../../src/commands/config';
import { prepareFile } from './utils';
describe('getConfig', () => {
const originalEnv = { ...process.env };
function resetEnv(env: NodeJS.ProcessEnv) {
Object.keys(process.env).forEach((key) => {
if (!(key in env)) {
delete process.env[key];
} else {
process.env[key] = env[key];
}
});
}
beforeEach(() => {
resetEnv(originalEnv);
});
afterAll(() => {
resetEnv(originalEnv);
});
it('return config values from the global config file', async () => {
const configFile = await prepareFile(
'.opencommit',
`
OCO_OPENAI_API_KEY="sk-key"
OCO_ANTHROPIC_API_KEY="secret-key"
OCO_TOKENS_MAX_INPUT="8192"
OCO_TOKENS_MAX_OUTPUT="1000"
OCO_OPENAI_BASE_PATH="/openai/api"
OCO_DESCRIPTION="true"
OCO_EMOJI="true"
OCO_MODEL="gpt-4"
OCO_LANGUAGE="de"
OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m"
OCO_PROMPT_MODULE="@commitlint"
OCO_AI_PROVIDER="ollama"
OCO_GITPUSH="false"
OCO_ONE_LINE_COMMIT="true"
`
);
const config = getConfig({ configPath: configFile.filePath, envPath: '' });
expect(config).not.toEqual(null);
expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key');
expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key');
expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192);
expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000);
expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api');
expect(config!['OCO_DESCRIPTION']).toEqual(true);
expect(config!['OCO_EMOJI']).toEqual(true);
expect(config!['OCO_MODEL']).toEqual('gpt-4');
expect(config!['OCO_LANGUAGE']).toEqual('de');
expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m');
expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint');
expect(config!['OCO_AI_PROVIDER']).toEqual('ollama');
expect(config!['OCO_GITPUSH']).toEqual(false);
expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true);
await configFile.cleanup();
});
it('return config values from the local env file', async () => {
const envFile = await prepareFile(
'.env',
`
OCO_OPENAI_API_KEY="sk-key"
OCO_ANTHROPIC_API_KEY="secret-key"
OCO_TOKENS_MAX_INPUT="8192"
OCO_TOKENS_MAX_OUTPUT="1000"
OCO_OPENAI_BASE_PATH="/openai/api"
OCO_DESCRIPTION="true"
OCO_EMOJI="true"
OCO_MODEL="gpt-4"
OCO_LANGUAGE="de"
OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m"
OCO_PROMPT_MODULE="@commitlint"
OCO_AI_PROVIDER="ollama"
OCO_GITPUSH="false"
OCO_ONE_LINE_COMMIT="true"
`
);
const config = getConfig({ configPath: '', envPath: envFile.filePath });
expect(config).not.toEqual(null);
expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key');
expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key');
expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192);
expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000);
expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api');
expect(config!['OCO_DESCRIPTION']).toEqual(true);
expect(config!['OCO_EMOJI']).toEqual(true);
expect(config!['OCO_MODEL']).toEqual('gpt-4');
expect(config!['OCO_LANGUAGE']).toEqual('de');
expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m');
expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint');
expect(config!['OCO_AI_PROVIDER']).toEqual('ollama');
expect(config!['OCO_GITPUSH']).toEqual(false);
expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true);
await envFile.cleanup();
});
});

View File

@@ -1,29 +0,0 @@
import path from 'path';
import { mkdtemp, rm, writeFile } from 'fs';
import { promisify } from 'util';
import { tmpdir } from 'os';
const fsMakeTempDir = promisify(mkdtemp);
const fsRemove = promisify(rm);
const fsWriteFile = promisify(writeFile);
/**
* Prepare tmp file for the test
*/
export async function prepareFile(
fileName: string,
content: string
): Promise<{
filePath: string;
cleanup: () => Promise<void>;
}> {
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
const filePath = path.resolve(tempDir, fileName);
await fsWriteFile(filePath, content);
const cleanup = async () => {
return fsRemove(tempDir, { recursive: true });
};
return {
filePath,
cleanup
};
}

View File

@@ -5,8 +5,8 @@
"module": "ESNext", "module": "ESNext",
// "rootDir": "./src", // "rootDir": "./src",
"resolveJsonModule": true,
"moduleResolution": "node", "moduleResolution": "node",
"resolveJsonModule": true,
"allowJs": true, "allowJs": true,
@@ -21,9 +21,6 @@
"skipLibCheck": true "skipLibCheck": true
}, },
"include": [
"test/jest-setup.ts"
],
"exclude": ["node_modules"], "exclude": ["node_modules"],
"ts-node": { "ts-node": {
"esm": true, "esm": true,