Compare commits


1 Commit

Author: Sukharev
SHA1: 78456c4431
Message: Revert "Suggest adding 'Edit' option to the Yes/No confirmation #53 (#70)" (this reverts commit 70f048672c)
Date: 2023-04-28 14:23:59 +08:00
45 changed files with 352 additions and 53512 deletions


@@ -21,8 +21,8 @@
"rules": {
"prettier/prettier": "error",
"no-console": "error",
"import/order": "off",
"sort-imports": "off",
"import/order": "off",
"simple-import-sort/imports": "error",
"simple-import-sort/exports": "error",
"import/first": "error",


@@ -9,7 +9,7 @@ Thanks for considering contributing to the project.
3. Create a new branch for your changes.
4. Make your changes and commit them with descriptive commit messages.
5. Push your changes to your forked repository.
6. Create a pull request from your branch to the `dev` branch, not `master`. Please open PRs against `dev`.
6. Create a pull request from your branch to the `dev` branch.
## Getting started

Binary image file not shown (before: 4.7 KiB).

Binary image file not shown (before: 304 KiB, after: 318 KiB).

.gitignore (vendored): 2 changed lines

@@ -1,7 +1,9 @@
node_modules/
coverage/
out/
temp/
build/
dist/
application.log
.DS_Store
/*.env


@@ -1 +0,0 @@
out/github-action.cjs


@@ -1 +0,0 @@
out


@@ -1,3 +1,2 @@
/build
/dist
/out
/dist


@@ -1,6 +1,6 @@
MIT License
Copyright (c) Dima Sukharev, https://github.com/di-sukharev
Copyright (c) Dima Sukharev
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

README.md: 257 changed lines

@@ -3,12 +3,11 @@
<img src=".github/logo-grad.svg" alt="OpenCommit logo"/>
<h1 align="center">OpenCommit</h1>
<h4 align="center">Follow the bird <a href="https://twitter.com/io_Y_oi"><img src="https://img.shields.io/twitter/follow/io_Y_oi?style=flat&label=io_Y_oi&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
</h4>
</div>
<h2>Auto-generate meaningful commits in 1 second</h2>
<h2>GPT CLI to auto-generate impressive commits in 1 second</h2>
<p>Killing lame commits with AI 🤯🔫</p>
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
<h4 align="center">🪩 Winner of GitHub 2023 HACKATHON <a href="https://twitter.com/io_Y_oi/status/1683448136973582336"><img style="width:14px; height:14px; margin-top: -4px" src=".github/github-mark-white.png" align="center"></a>
</h4>
</div>
---
@@ -17,11 +16,9 @@
<img src=".github/opencommit-example.png" alt="OpenCommit example"/>
</div>
All the commits in this repo are authored by OpenCommit — look at [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emojis and long commit descriptions are configurable.
All the commits in this repo are done with OpenCommit — look into [the commits](https://github.com/di-sukharev/opencommit/commit/eae7618d575ee8d2e9fff5de56da79d40c4bc5fc) to see how OpenCommit works. Emoji and long commit description text is configurable.
## Setup OpenCommit as a CLI tool
You can use OpenCommit by simply running it via the CLI like this `oco`. 2 seconds and your staged changes are committed with a meaningful message.
## Setup
1. Install OpenCommit globally to use in any repository:
@@ -29,17 +26,15 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```
MacOS may ask to run the command with `sudo` when installing a package globally.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure you add payment details, so API works.
3. Set the key to OpenCommit config:
```sh
oco config set OCO_OPENAI_API_KEY=<your_api_key>
opencommit config set OPENAI_API_KEY=<your_api_key>
```
Your API key is stored locally in the `~/.opencommit` config file.
Your api key is stored locally in `~/.opencommit` config file.
## Usage
@@ -50,144 +45,70 @@ git add <files...>
opencommit
```
You can also use the `oco` shortcut:
You can also use the `oc` shortcut:
```sh
git add <files...>
oco
oc
```
## Configuration
## Features
### Local per repo configuration
### Preface commits with emoji 🤠
Create a `.env` file and add OpenCommit config variables there like this:
[GitMoji](https://gitmoji.dev/) convention is used.
```env
OCO_OPENAI_API_KEY=<your OpenAI API token>
OCO_OPENAI_MAX_TOKENS=<max response tokens from OpenAI API>
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
```
### Global config for all repos
Local config still has more priority than Global config, but you may set `OCO_MODEL` and `OCO_LOCALE` globally and set local configs for `OCO_EMOJI` and `OCO_DESCRIPTION` per repo which is more convenient.
Simply set any of the variables above like this:
To add emoji:
```sh
oco config set OCO_MODEL=gpt-4
oc config set emoji=true
```
Configure [GitMoji](https://gitmoji.dev/) to preface a message.
To remove emoji:
```sh
oco config set OCO_EMOJI=true
oc config set emoji=false
```
To remove preface emojis:
### Postface commits with descriptions of changes
To add descriptions:
```sh
oco config set OCO_EMOJI=false
oc config set description=true
```
### Switch to GPT-4 or other models
By default, OpenCommit uses `gpt-3.5-turbo-16k` model.
You may switch to GPT-4 which performs better, but costs ~x15 times more 🤠
To remove description:
```sh
oco config set OCO_MODEL=gpt-4
oc config set description=false
```
or for as a cheaper option:
### Internationalization support
```sh
oco config set OCO_MODEL=gpt-3.5-turbo
```
Make sure that you spell it `gpt-4` (lowercase) and that you have API access to the 4th model. Even if you have ChatGPT+, that doesn't necessarily mean that you have API access to GPT-4.
### Locale configuration
To globally specify the language used to generate commit messages:
To specify the language used to generate commit messages:
```sh
# de, German, Deutsch
oco config set OCO_LANGUAGE=de
oco config set OCO_LANGUAGE=German
oco config set OCO_LANGUAGE=Deutsch
oc config set language=de
oc config set language=German
oc config set language=Deutsch
# fr, French, française
oco config set OCO_LANGUAGE=fr
oco config set OCO_LANGUAGE=French
oco config set OCO_LANGUAGE=française
oc config set language=fr
oc config set language=French
oc config set language=française
```
The default language setting is **English**
The default language set is **English**
All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder
### Switch to `@commitlint`
### Git flags
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to the `@commitlint` prompt module if you prefer. This option lets you generate commit messages that respect your local `@commitlint` configuration.
You can set this option by running the following command:
The `opencommit` or `oc` commands can be used in place of the `git commit -m "${generatedMessage}"` command. This means that any regular flags that are used with the `git commit` command will also be applied when using `opencommit` or `oc`.
```sh
oco config set OCO_PROMPT_MODULE=<module>
```
Replace `<module>` with either `conventional-commit` or `@commitlint`.
#### Example:
To switch to using the `@commitlint` prompt module, run:
```sh
oco config set OCO_PROMPT_MODULE=@commitlint
```
To switch back to the default conventional-commit message generator, run:
```sh
oco config set OCO_PROMPT_MODULE=conventional-commit
```
#### Integrating with `@commitlint`
The integration between `@commitlint` and OpenCommit is done automatically the first time OpenCommit is run with `OCO_PROMPT_MODULE` set to `@commitlint`. However, if you need to force set or reset the configuration for `@commitlint`, you can run the following command:
```sh
oco commitlint force
```
To view the generated configuration for `@commitlint`, you can use this command:
```sh
oco commitlint get
```
This allows you to ensure that the configuration is set up as desired.
Additionally, the integration creates a file named `.opencommit-commitlint` which contains the prompts used for the local `@commitlint` configuration. You can modify this file to fine-tune the example commit message generated by OpenAI. This gives you the flexibility to make adjustments based on your preferences or project guidelines.
OpenCommit generates a file named `.opencommit-commitlint` in your project directory which contains the prompts used for the local `@commitlint` configuration. You can modify this file to fine-tune the example commit message generated by OpenAI. If the local `@commitlint` configuration changes, this file will be updated the next time OpenCommit is run.
This offers you greater control over the generated commit messages, allowing for customization that aligns with your project's conventions.
## Git flags
The `opencommit` or `oco` commands can be used in place of the `git commit -m "${generatedMessage}"` command. This means that any regular flags that are used with the `git commit` command will also be applied when using `opencommit` or `oco`.
```sh
oco --no-verify
oc --no-verify
```
is translated to :
@@ -196,68 +117,33 @@ is translated to :
git commit -m "${generatedMessage}" --no-verify
```
To embed your own text in the generated message, you can use the template function, for instance:
```sh
oco '#205: $msg'
```
> opencommit examines placeholders in the parameters, allowing you to append additional information before and after the placeholders, such as the relevant Issue or Pull Request. Similarly, you have the option to customize the `OCO_MESSAGE_TEMPLATE_PLACEHOLDER` configuration item, for example, simplifying it to `$m`.
### Message Template Placeholder Config
#### Overview
The `OCO_MESSAGE_TEMPLATE_PLACEHOLDER` feature in the `opencommit` tool allows users to embed a custom message within the generated commit message using a template function. This configuration is designed to enhance the flexibility and customizability of commit messages, making it easier for users to include relevant information directly within their commits.
#### Implementation Details
In our codebase, the implementation of this feature can be found in the following segment:
```javascript
commitMessage = messageTemplate.replace(
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
commitMessage
);
```
This line is responsible for replacing the placeholder in the `messageTemplate` with the actual `commitMessage`.
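For illustration, here is a minimal, runnable sketch of that substitution step. The template string and the generated message below are made-up example values, and the default `$msg` placeholder is assumed:

```typescript
// Minimal sketch of the template substitution (example values, not the actual CLI wiring).
const OCO_MESSAGE_TEMPLATE_PLACEHOLDER = '$msg'; // assumed default placeholder

// What the user typed, e.g. `oco '#205: $msg'`
const messageTemplate = '#205: $msg';

// Hypothetical message generated for the staged diff
const generatedMessage = 'fix(ui): align confirmation buttons';

// The same `replace` call as in the snippet above
const commitMessage = messageTemplate.replace(
  OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
  generatedMessage
);

console.log(commitMessage); // "#205: fix(ui): align confirmation buttons"
```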
#### Usage
For instance, users can leverage this feature by running `oco '$msg #205'`. The code above is the mechanism behind such commands, ensuring that the placeholder is replaced with the generated commit message.
#### Committing with the Message
Once the desired commit message has been generated, users can commit with it directly.
### Ignore files
You can remove files from being sent to OpenAI by creating a `.opencommitignore` file. For example:
You can ignore files from submission to OpenAI by creating a `.opencommitignore` file. For example:
```ignorelang
path/to/large-asset.zip
**/*.jpg
```
This helps prevent opencommit from uploading artifacts and large files.
This is useful for preventing opencommit from uploading artifacts and large files.
By default, opencommit ignores files matching: `*-lock.*` and `*.lock`
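As a rough illustration of how these patterns come together, the sketch below combines the default patterns with the example `.opencommitignore` rules from above using the `ignore` package (already in opencommit's dependency list). The filtering shown here is an assumption about the approach, not the tool's exact internals:

```typescript
import ignore from 'ignore';

// Default patterns plus the example .opencommitignore rules shown above (illustrative only).
const ig = ignore().add(['*-lock.*', '*.lock', 'path/to/large-asset.zip', '**/*.jpg']);

// Staged paths are filtered before the diff is built and sent to OpenAI.
const staged = ['src/cli.ts', 'package-lock.json', 'path/to/large-asset.zip'];
const filesToSend = staged.filter((file) => !ig.ignores(file));

console.log(filesToSend); // ['src/cli.ts']
```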
## Git hook (KILLER FEATURE)
## Git hook
You can set OpenCommit as Git [`prepare-commit-msg`](https://git-scm.com/docs/githooks#_prepare_commit_msg) hook. Hook integrates with your IDE Source Control and allows you to edit the message before committing.
You can set OpenCommit as Git [`prepare-commit-msg`](https://git-scm.com/docs/githooks#_prepare_commit_msg) hook. The hook integrates with your IDE Source Control and allows you to edit the message before committing.
To set the hook:
```sh
oco hook set
oc hook set
```
To unset the hook:
```sh
oco hook unset
oc hook unset
```
To use the hook:
@@ -269,67 +155,6 @@ git commit
Or follow the process of your IDE Source Control feature, when it calls `git commit` command — OpenCommit will integrate into the flow.
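Conceptually, this works like any `prepare-commit-msg` hook: Git passes the path of the commit message file as the first argument, and the hook writes the generated message into it so the editor or IDE opens with the message pre-filled. Below is a minimal sketch of that mechanism with a hypothetical stand-in for the real OpenAI call; it is not OpenCommit's actual hook source:

```typescript
#!/usr/bin/env node
import { writeFileSync } from 'fs';

// Git invokes prepare-commit-msg with: <message-file-path> [<commit-source> [<sha>]]
const [messageFilePath, commitSource] = process.argv.slice(2);

// Hypothetical stand-in for the real "staged diff -> OpenAI" call.
async function generateMessageForStagedDiff(): Promise<string> {
  return 'feat(hook): pre-fill commit message from staged diff';
}

async function run() {
  // Only pre-fill plain `git commit` runs; merges, squashes and `-m` commits already have a source.
  if (commitSource) return;
  writeFileSync(messageFilePath, await generateMessageForStagedDiff());
}

run();
```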
## Setup OpenCommit as a GitHub Action (BETA) 🔥
OpenCommit is now available as a GitHub Action which automatically improves all new commit messages when you push to remote!
This is great if you want to make sure all of the commits in all of your repository branches are meaningful and not lame like `fix1` or `done2`.
Create a file `.github/workflows/opencommit.yml` with the contents below:
```yml
name: 'OpenCommit Action'
on:
push:
# this list of branches is often enough,
# but you may still ignore other public branches
branches-ignore: [main, master, dev, development, release]
jobs:
opencommit:
timeout-minutes: 10
name: OpenCommit
runs-on: ubuntu-latest
permissions: write-all
steps:
- name: Setup Node.js Environment
uses: actions/setup-node@v2
with:
node-version: '16'
- uses: actions/checkout@v3
with:
fetch-depth: 0
- uses: di-sukharev/opencommit@github-action-v1.0.4
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
env:
# set openAI api key in repo actions secrets,
# for openAI keys go to: https://platform.openai.com/account/api-keys
# for repo secret go to: <your_repo_url>/settings/secrets/actions
OCO_OPENAI_API_KEY: ${{ secrets.OCO_OPENAI_API_KEY }}
# customization
OCO_OPENAI_MAX_TOKENS: 500
OCO_OPENAI_BASE_PATH: ''
OCO_DESCRIPTION: false
OCO_EMOJI: false
OCO_MODEL: gpt-3.5-turbo-16k
OCO_LANGUAGE: en
OCO_PROMPT_MODULE: conventional-commit
```
That is it. Now when you push to any branch in your repo — all NEW commits are being improved by your never-tired AI.
Make sure you exclude public collaboration branches (`main`, `dev`, etc.) in `branches-ignore`, so OpenCommit does not rebase commits there while improving the messages.
Interactive rebase (`rebase -i`) changes commits' SHA, so the commit history in remote becomes different from your local branch history. This is okay if you work on the branch alone, but may be inconvenient for other collaborators.
## Payments
You pay for your requests to OpenAI API on your own.
OpenCommit stores your key locally.
OpenCommit by default uses ChatGPT (3.5-turbo-16k) official model, which is a lot cheaper than gpt-4.
You pay for your own requests to the OpenAI API. OpenCommit uses the official ChatGPT (3.5-turbo) model, which is ~15x cheaper than GPT-4.


@@ -1,29 +0,0 @@
name: 'OpenCommit — improve commits with AI 🧙'
description: 'Replaces lame commit messages with meaningful AI-generated messages when you push to remote'
author: 'https://github.com/di-sukharev'
repo: 'https://github.com/di-sukharev/opencommit/tree/github-action'
branding:
icon: 'git-commit'
color: 'green'
keywords:
[
'git',
'chatgpt',
'gpt',
'ai',
'openai',
'opencommit',
'aicommit',
'aicommits',
'gptcommit',
'commit'
]
inputs:
GITHUB_TOKEN:
description: 'GitHub token'
required: true
runs:
using: 'node16'
main: 'out/github-action.cjs'


@@ -1,24 +1,14 @@
import { build } from 'esbuild';
import fs from 'fs';
import { build } from 'esbuild'
import fs from 'fs'
await build({
entryPoints: ['./src/cli.ts'],
bundle: true,
platform: 'node',
format: 'cjs',
outfile: './out/cli.cjs'
entryPoints: ['./src/cli.ts'],
bundle: true,
platform: 'node',
format: 'cjs',
outfile: './out/cli.cjs',
});
await build({
entryPoints: ['./src/github-action.ts'],
bundle: true,
platform: 'node',
format: 'cjs',
outfile: './out/github-action.cjs'
});
const wasmFile = fs.readFileSync('./node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm')
const wasmFile = fs.readFileSync(
'./node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm'
);
fs.writeFileSync('./out/tiktoken_bg.wasm', wasmFile);
fs.writeFileSync('./out/tiktoken_bg.wasm', wasmFile)

out/cli.cjs: 22670 changed lines

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

package-lock.json (generated): 480 changed lines

@@ -1,25 +1,19 @@
{
"name": "opencommit",
"version": "2.4.2",
"version": "2.0.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "opencommit",
"version": "2.4.2",
"version": "2.0.1",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.1.1",
"@clack/prompts": "^0.6.1",
"@dqbd/tiktoken": "^1.0.2",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",
"crypto": "^1.0.1",
"execa": "^7.0.0",
"ignore": "^5.2.4",
"ini": "^3.0.1",
@@ -27,11 +21,10 @@
"openai": "^3.2.1"
},
"bin": {
"oco": "out/cli.cjs",
"oc": "out/cli.cjs",
"opencommit": "out/cli.cjs"
},
"devDependencies": {
"@commitlint/types": "^17.4.4",
"@types/ini": "^1.3.31",
"@types/inquirer": "^9.0.3",
"@types/node": "^16.18.14",
@@ -45,112 +38,6 @@
"typescript": "^4.9.3"
}
},
"node_modules/@actions/core": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"dependencies": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
}
},
"node_modules/@actions/exec": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
"integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
"dependencies": {
"@actions/io": "^1.0.1"
}
},
"node_modules/@actions/github": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/@actions/github/-/github-5.1.1.tgz",
"integrity": "sha512-Nk59rMDoJaV+mHCOJPXuvB1zIbomlKS0dmSIqPGxd0enAXBnOfn4VWF+CGtRCwXZG9Epa54tZA7VIRlJDS8A6g==",
"dependencies": {
"@actions/http-client": "^2.0.1",
"@octokit/core": "^3.6.0",
"@octokit/plugin-paginate-rest": "^2.17.0",
"@octokit/plugin-rest-endpoint-methods": "^5.13.0"
}
},
"node_modules/@actions/github/node_modules/@octokit/auth-token": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz",
"integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==",
"dependencies": {
"@octokit/types": "^6.0.3"
}
},
"node_modules/@actions/github/node_modules/@octokit/core": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.6.0.tgz",
"integrity": "sha512-7RKRKuA4xTjMhY+eG3jthb3hlZCsOwg3rztWh75Xc+ShDWOfDDATWbeZpAHBNRpm4Tv9WgBMOy1zEJYXG6NJ7Q==",
"dependencies": {
"@octokit/auth-token": "^2.4.4",
"@octokit/graphql": "^4.5.8",
"@octokit/request": "^5.6.3",
"@octokit/request-error": "^2.0.5",
"@octokit/types": "^6.0.3",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
}
},
"node_modules/@actions/github/node_modules/@octokit/endpoint": {
"version": "6.0.12",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.12.tgz",
"integrity": "sha512-lF3puPwkQWGfkMClXb4k/eUT/nZKQfxinRWJrdZaJO85Dqwo/G0yOC434Jr2ojwafWJMYqFGFa5ms4jJUgujdA==",
"dependencies": {
"@octokit/types": "^6.0.3",
"is-plain-object": "^5.0.0",
"universal-user-agent": "^6.0.0"
}
},
"node_modules/@actions/github/node_modules/@octokit/graphql": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.8.0.tgz",
"integrity": "sha512-0gv+qLSBLKF0z8TKaSKTsS39scVKF9dbMxJpj3U0vC7wjNWFuIpL/z76Qe2fiuCbDRcJSavkXsVtMS6/dtQQsg==",
"dependencies": {
"@octokit/request": "^5.6.0",
"@octokit/types": "^6.0.3",
"universal-user-agent": "^6.0.0"
}
},
"node_modules/@actions/github/node_modules/@octokit/request": {
"version": "5.6.3",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.3.tgz",
"integrity": "sha512-bFJl0I1KVc9jYTe9tdGGpAMPy32dLBXXo1dS/YwSCTL/2nd9XeHsY616RE3HPXDVk+a+dBuzyz5YdlXwcDTr2A==",
"dependencies": {
"@octokit/endpoint": "^6.0.1",
"@octokit/request-error": "^2.1.0",
"@octokit/types": "^6.16.1",
"is-plain-object": "^5.0.0",
"node-fetch": "^2.6.7",
"universal-user-agent": "^6.0.0"
}
},
"node_modules/@actions/github/node_modules/@octokit/request-error": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.1.0.tgz",
"integrity": "sha512-1VIvgXxs9WHSjicsRwq8PlR2LR2x6DwsJAaFgzdi0JfJoGSO8mYI/cHJQ+9FbN21aa+DrgNLnwObmyeSC8Rmpg==",
"dependencies": {
"@octokit/types": "^6.0.3",
"deprecation": "^2.0.0",
"once": "^1.4.0"
}
},
"node_modules/@actions/http-client": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz",
"integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==",
"dependencies": {
"tunnel": "^0.0.6"
}
},
"node_modules/@actions/io": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
"integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q=="
},
"node_modules/@clack/core": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/@clack/core/-/core-0.3.2.tgz",
@@ -185,49 +72,6 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@commitlint/types": {
"version": "17.4.4",
"resolved": "https://registry.npmjs.org/@commitlint/types/-/types-17.4.4.tgz",
"integrity": "sha512-amRN8tRLYOsxRr6mTnGGGvB5EmW/4DDjLMgiwK3CCVEmN6Sr/6xePGEpWaspKkckILuUORCwe6VfDBw6uj4axQ==",
"dev": true,
"dependencies": {
"chalk": "^4.1.0"
},
"engines": {
"node": ">=v14"
}
},
"node_modules/@commitlint/types/node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"dependencies": {
"color-convert": "^2.0.1"
},
"engines": {
"node": ">=8"
},
"funding": {
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
}
},
"node_modules/@commitlint/types/node_modules/chalk": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"dependencies": {
"ansi-styles": "^4.1.0",
"supports-color": "^7.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@@ -402,231 +246,6 @@
"node": ">= 8"
}
},
"node_modules/@octokit/auth-token": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.3.tgz",
"integrity": "sha512-/aFM2M4HVDBT/jjDBa84sJniv1t9Gm/rLkalaz9htOm+L+8JMj1k9w0CkUdcxNyNxZPlTxKPVko+m1VlM58ZVA==",
"peer": true,
"dependencies": {
"@octokit/types": "^9.0.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/auth-token/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/auth-token/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/core": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.0.tgz",
"integrity": "sha512-AgvDRUg3COpR82P7PBdGZF/NNqGmtMq2NiPqeSsDIeCfYFOZ9gddqWNQHnFdEUf+YwOj4aZYmJnlPp7OXmDIDg==",
"peer": true,
"dependencies": {
"@octokit/auth-token": "^3.0.0",
"@octokit/graphql": "^5.0.0",
"@octokit/request": "^6.0.0",
"@octokit/request-error": "^3.0.0",
"@octokit/types": "^9.0.0",
"before-after-hook": "^2.2.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/core/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/core/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/endpoint": {
"version": "7.0.5",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.5.tgz",
"integrity": "sha512-LG4o4HMY1Xoaec87IqQ41TQ+glvIeTKqfjkCEmt5AIwDZJwQeVZFIEYXrYY6yLwK+pAScb9Gj4q+Nz2qSw1roA==",
"peer": true,
"dependencies": {
"@octokit/types": "^9.0.0",
"is-plain-object": "^5.0.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/endpoint/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/graphql": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.5.tgz",
"integrity": "sha512-Qwfvh3xdqKtIznjX9lz2D458r7dJPP8l6r4GQkIdWQouZwHQK0mVT88uwiU2bdTU2OtT1uOlKpRciUWldpG0yQ==",
"peer": true,
"dependencies": {
"@octokit/request": "^6.0.0",
"@octokit/types": "^9.0.0",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/graphql/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/openapi-types": {
"version": "12.11.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-12.11.0.tgz",
"integrity": "sha512-VsXyi8peyRq9PqIz/tpqiL2w3w80OgVMwBHltTml3LmVvXiphgeqmY9mvBw9Wu7e0QWk/fqD37ux8yP5uVekyQ=="
},
"node_modules/@octokit/plugin-paginate-rest": {
"version": "2.21.3",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.21.3.tgz",
"integrity": "sha512-aCZTEf0y2h3OLbrgKkrfFdjRL6eSOo8komneVQJnYecAxIej7Bafor2xhuDJOIFau4pk0i/P28/XgtbyPF0ZHw==",
"dependencies": {
"@octokit/types": "^6.40.0"
},
"peerDependencies": {
"@octokit/core": ">=2"
}
},
"node_modules/@octokit/plugin-rest-endpoint-methods": {
"version": "5.16.2",
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.16.2.tgz",
"integrity": "sha512-8QFz29Fg5jDuTPXVtey05BLm7OB+M8fnvE64RNegzX7U+5NUXcOcnpTIK0YfSHBg8gYd0oxIq3IZTe9SfPZiRw==",
"dependencies": {
"@octokit/types": "^6.39.0",
"deprecation": "^2.3.1"
},
"peerDependencies": {
"@octokit/core": ">=3"
}
},
"node_modules/@octokit/request": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.3.tgz",
"integrity": "sha512-TNAodj5yNzrrZ/VxP+H5HiYaZep0H3GU0O7PaF+fhDrt8FPrnkei9Aal/txsN/1P7V3CPiThG0tIvpPDYUsyAA==",
"peer": true,
"dependencies": {
"@octokit/endpoint": "^7.0.0",
"@octokit/request-error": "^3.0.0",
"@octokit/types": "^9.0.0",
"is-plain-object": "^5.0.0",
"node-fetch": "^2.6.7",
"universal-user-agent": "^6.0.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/request-error": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz",
"integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==",
"peer": true,
"dependencies": {
"@octokit/types": "^9.0.0",
"deprecation": "^2.0.0",
"once": "^1.4.0"
},
"engines": {
"node": ">= 14"
}
},
"node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/request-error/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
"version": "17.1.2",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-17.1.2.tgz",
"integrity": "sha512-OaS7Ol4Y+U50PbejfzQflGWRMxO04nYWO5ZBv6JerqMKE2WS/tI9VoVDDPXHBlRMGG2fOdKwtVGlFfc7AVIstw==",
"peer": true
},
"node_modules/@octokit/request/node_modules/@octokit/types": {
"version": "9.2.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.2.2.tgz",
"integrity": "sha512-9BjDxjgQIvCjNWZsbqyH5QC2Yni16oaE6xL+8SUBMzcYPF4TGQBXGA97Cl3KceK9mwiNMb1mOYCz6FbCCLEL+g==",
"peer": true,
"dependencies": {
"@octokit/openapi-types": "^17.1.2"
}
},
"node_modules/@octokit/types": {
"version": "6.41.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.41.0.tgz",
"integrity": "sha512-eJ2jbzjdijiL3B4PrSQaSjuF2sPEQPVCPzBvTHJD9Nz+9dw2SGH4K4xeQJ77YfTq5bRQ+bD8wT11JbeDPmxmGg==",
"dependencies": {
"@octokit/openapi-types": "^12.11.0"
}
},
"node_modules/@octokit/webhooks-schemas": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/@octokit/webhooks-schemas/-/webhooks-schemas-6.11.0.tgz",
"integrity": "sha512-ekca2jZhb2vfQy43rjvJoV77IwEKvA42BmJ2m8H3WaNfG9BF05RodnFjh3MSOksNseoNO8w8IPLZ3d5546NH2w=="
},
"node_modules/@octokit/webhooks-types": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-6.11.0.tgz",
"integrity": "sha512-AanzbulOHljrku1NGfafxdpTCfw2ENaWzH01N2vqQM+cUFbk868Cgh0xylz0JIM9BoKbfI++bdD6EYX0Q/UTEw=="
},
"node_modules/@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
@@ -1034,11 +653,6 @@
}
]
},
"node_modules/before-after-hook": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
"integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ=="
},
"node_modules/bl": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz",
@@ -1226,12 +840,6 @@
"node": ">= 8"
}
},
"node_modules/crypto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/crypto/-/crypto-1.0.1.tgz",
"integrity": "sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==",
"deprecated": "This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in."
},
"node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
@@ -1274,11 +882,6 @@
"node": ">=0.4.0"
}
},
"node_modules/deprecation": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
@@ -2436,14 +2039,6 @@
"node": ">=8"
}
},
"node_modules/is-plain-object": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
"integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/is-stream": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz",
@@ -2669,25 +2264,6 @@
"integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==",
"dev": true
},
"node_modules/node-fetch": {
"version": "2.6.11",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.11.tgz",
"integrity": "sha512-4I6pdBY1EthSqDmJkiNk3JIT8cswwR9nfeW/cPdUagJYEQG7R95WRH74wpz7ma8Gh/9dI9FP+OU+0E4FvtA55w==",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/npm-run-path": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.1.0.tgz",
@@ -2717,6 +2293,7 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dev": true,
"dependencies": {
"wrappy": "1"
}
@@ -3136,9 +2713,9 @@
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/semver": {
"version": "7.5.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz",
"integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==",
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"dev": true,
"dependencies": {
"lru-cache": "^6.0.0"
@@ -3326,11 +2903,6 @@
"node": ">=8.0"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="
},
"node_modules/ts-node": {
"version": "10.9.1",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz",
@@ -3400,14 +2972,6 @@
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==",
"dev": true
},
"node_modules/tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
"engines": {
"node": ">=0.6.11 <=0.7.0 || >=0.7.3"
}
},
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
@@ -3453,11 +3017,6 @@
"node": ">=4.2.0"
}
},
"node_modules/universal-user-agent": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
},
"node_modules/uri-js": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
@@ -3472,14 +3031,6 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
@@ -3494,20 +3045,6 @@
"defaults": "^1.0.3"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -3575,7 +3112,8 @@
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
"dev": true
},
"node_modules/yallist": {
"version": "4.0.0",


@@ -1,7 +1,7 @@
{
"name": "opencommit",
"version": "3.0.3",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"version": "2.0.1",
"description": "GPT CLI to auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
"chatgpt",
@@ -17,6 +17,7 @@
"main": "cli.js",
"bin": {
"opencommit": "./out/cli.cjs",
"oc": "./out/cli.cjs",
"oco": "./out/cli.cjs"
},
"repository": {
@@ -26,8 +27,7 @@
"author": "https://github.com/di-sukharev",
"license": "MIT",
"files": [
"out/cli.cjs",
"out/tiktoken_bg.wasm"
"out/**/*"
],
"release": {
"branches": [
@@ -42,13 +42,11 @@
"start": "node ./out/cli.cjs",
"dev": "ts-node ./src/cli.ts",
"build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
"deploy": "npm run build:push && npm version patch && git push --follow-tags && npm publish --tag latest",
"deploy": "npm run build && npm version patch && npm publish --tag latest",
"lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src"
},
"devDependencies": {
"@commitlint/types": "^17.4.4",
"@types/ini": "^1.3.31",
"@types/inquirer": "^9.0.3",
"@types/node": "^16.18.14",
@@ -62,17 +60,11 @@
"typescript": "^4.9.3"
},
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.1.1",
"@clack/prompts": "^0.6.1",
"@dqbd/tiktoken": "^1.0.2",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",
"crypto": "^1.0.1",
"execa": "^7.0.0",
"ignore": "^5.2.4",
"ini": "^3.0.1",


@@ -1,5 +1,4 @@
export enum COMMANDS {
config = 'config',
hook = 'hook',
commitlint = 'commitlint'
config = 'config'
}


@@ -1,27 +1,18 @@
import { intro, outro } from '@clack/prompts';
import axios from 'axios';
import chalk from 'chalk';
import { execa } from 'execa';
import {
ChatCompletionRequestMessage,
Configuration as OpenAiApiConfiguration,
OpenAIApi
} from 'openai';
import { intro, outro } from '@clack/prompts';
import {
CONFIG_MODES,
DEFAULT_MODEL_TOKEN_LIMIT,
getConfig
} from './commands/config';
import { GenerateCommitMessageErrorEnum } from './generateCommitMessageFromGitDiff';
import { tokenCount } from './utils/tokenCount';
import { CONFIG_MODES, getConfig } from './commands/config';
const config = getConfig();
let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
let basePath = config?.OCO_OPENAI_BASE_PATH;
let apiKey = config?.OCO_OPENAI_API_KEY;
let apiKey = config?.OPENAI_API_KEY;
let basePath = config?.OPENAI_BASE_PATH;
const [command, mode] = process.argv.slice(2);
@@ -29,7 +20,7 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
intro('opencommit');
outro(
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
'OPENAI_API_KEY is not set, please run `oc config set OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
@@ -38,8 +29,6 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
process.exit(1);
}
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
class OpenAi {
private openAiApiConfiguration = new OpenAiApiConfiguration({
apiKey: apiKey
@@ -56,32 +45,20 @@ class OpenAi {
public generateCommitMessage = async (
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> => {
const params = {
model: MODEL,
messages,
temperature: 0,
top_p: 0.1,
max_tokens: maxTokens || 500
};
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content) + 4)
.reduce((a, b) => a + b, 0);
if (REQUEST_TOKENS > DEFAULT_MODEL_TOKEN_LIMIT - maxTokens) {
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
}
const { data } = await this.openAI.createChatCompletion(params);
const { data } = await this.openAI.createChatCompletion({
model: 'gpt-3.5-turbo',
messages,
temperature: 0,
top_p: 0.1,
max_tokens: 196
});
const message = data.choices[0].message;
return message?.content;
} catch (error) {
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);
} catch (error: unknown) {
outro(`${chalk.red('✖')} ${error}`);
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
@@ -95,7 +72,7 @@ class OpenAi {
);
}
throw err;
process.exit(1);
}
};
}
@@ -104,8 +81,10 @@ export const getOpenCommitLatestVersion = async (): Promise<
string | undefined
> => {
try {
const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
return stdout;
const { data } = await axios.get(
'https://unpkg.com/opencommit/package.json'
);
return data.version;
} catch (_) {
outro('Error while getting the latest version of opencommit');
return undefined;


@@ -1,14 +1,13 @@
#!/usr/bin/env node
import { cli } from 'cleye';
import packageJSON from '../package.json' assert { type: 'json' };
import packageJSON from '../package.json';
import { commit } from './commands/commit';
import { commitlintConfigCommand } from './commands/commitlint';
import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { commit } from './commands/commit';
// import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
const extraArgs = process.argv.slice(2);
@@ -16,13 +15,13 @@ cli(
{
version: packageJSON.version,
name: 'opencommit',
commands: [configCommand, hookCommand, commitlintConfigCommand],
commands: [configCommand, hookCommand],
flags: {},
ignoreArgv: (type) => type === 'unknown-flag' || type === 'argument',
help: { description: packageJSON.description }
},
async () => {
await checkIsLatestVersion();
// await checkIsLatestVersion();
if (await isHookCalled()) {
prepareCommitMessageHook();


@@ -1,9 +0,0 @@
# @commitlint Module for opencommit
1. Load commitlint configuration within tree.
2. Generate a commit with commitlint prompt:
- Will not run if hash is the same.
- Infer a prompt for each commitlint rule.
- Ask OpenAI to generate consistency with embedded commitlint rules.
- Store configuration close to commitlint configuration.
3. Replace conventional-commit prompt with commitlint prompt.


@@ -1,17 +1,8 @@
import chalk from 'chalk';
import { execa } from 'execa';
import {
confirm,
intro,
isCancel,
multiselect,
outro,
select,
spinner
} from '@clack/prompts';
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
GenerateCommitMessageErrorEnum,
generateCommitMessageWithChatCompletion
} from '../generateCommitMessageFromGitDiff';
import {
assertGitRepo,
getChangedFiles,
@@ -19,136 +10,128 @@ import {
getStagedFiles,
gitAdd
} from '../utils/git';
import {
spinner,
confirm,
outro,
isCancel,
intro,
multiselect,
select
} from '@clack/prompts';
import chalk from 'chalk';
import { trytm } from '../utils/trytm';
import { getConfig } from './config';
const config = getConfig();
const getGitRemotes = async () => {
const { stdout } = await execa('git', ['remote']);
return stdout.split('\n').filter((remote) => Boolean(remote.trim()));
};
// Check for the presence of message templates
const checkMessageTemplate = (extraArgs: string[]): string | false => {
for (const key in extraArgs) {
if (extraArgs[key].includes(config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER))
return extraArgs[key];
}
return false;
};
const generateCommitMessageFromGitDiff = async (
diff: string,
extraArgs: string[]
): Promise<void> => {
await assertGitRepo();
const commitSpinner = spinner();
commitSpinner.start('Generating the commit message');
const commitMessage = await generateCommitMessageWithChatCompletion(diff);
try {
let commitMessage = await generateCommitMessageByDiff(diff);
// TODO: show proper error messages
if (typeof commitMessage !== 'string') {
const errorMessages = {
[GenerateCommitMessageErrorEnum.emptyMessage]:
'empty openAI response, weird, try again',
[GenerateCommitMessageErrorEnum.internalError]:
'internal error, try again',
[GenerateCommitMessageErrorEnum.tooMuchTokens]:
'too much tokens in git diff, stage and commit files in parts'
};
const messageTemplate = checkMessageTemplate(extraArgs);
if (
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER &&
typeof messageTemplate === 'string'
) {
commitMessage = messageTemplate.replace(
config?.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
commitMessage
);
}
outro(`${chalk.red('✖')} ${errorMessages[commitMessage.error]}`);
process.exit(1);
}
commitSpinner.stop('📝 Commit message generated');
commitSpinner.stop('📝 Commit message generated');
outro(
`Generated commit message:
outro(
`Commit message:
${chalk.grey('——————————————————')}
${commitMessage}
${chalk.grey('——————————————————')}`
);
);
const isCommitConfirmedByUser = await confirm({
message: 'Confirm the commit message?'
});
const isCommitConfirmedByUser = await confirm({
message: 'Confirm the commit message?'
});
if (isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
const { stdout } = await execa('git', [
'commit',
'-m',
commitMessage,
...extraArgs
]);
if (isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
const { stdout } = await execa('git', [
'commit',
'-m',
commitMessage,
...extraArgs
]);
outro(`${chalk.green('✔')} Successfully committed`);
outro(`${chalk.green('✔')} successfully committed`);
outro(stdout);
outro(stdout);
const remotes = await getGitRemotes();
const remotes = await getGitRemotes();
if (!remotes.length) {
const { stdout } = await execa('git', ['push']);
if (stdout) outro(stdout);
process.exit(0);
}
if (remotes.length === 1) {
const isPushConfirmedByUser = await confirm({
message: 'Do you want to run `git push`?'
});
if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
const pushSpinner = spinner();
pushSpinner.start(`Running \`git push ${remotes[0]}\``);
const { stdout } = await execa('git', [
'push',
'--verbose',
remotes[0]
]);
pushSpinner.stop(
`${chalk.green('✔')} successfully pushed all commits to ${remotes[0]}`
);
if (!remotes.length) {
const { stdout } = await execa('git', ['push']);
if (stdout) outro(stdout);
} else {
outro('`git push` aborted');
process.exit(0);
}
} else {
const selectedRemote = (await select({
message: 'Choose a remote to push to',
options: remotes.map((remote) => ({ value: remote, label: remote }))
})) as string;
if (remotes.length === 1) {
const isPushConfirmedByUser = await confirm({
message: 'Do you want to run `git push`?'
});
if (!isCancel(selectedRemote)) {
const pushSpinner = spinner();
if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
const pushSpinner = spinner();
pushSpinner.start(`Running \`git push ${selectedRemote}\``);
pushSpinner.start(`Running 'git push ${remotes[0]}'`);
const { stdout } = await execa('git', ['push', selectedRemote]);
const { stdout } = await execa('git', [
'push',
'--verbose',
remotes[0]
]);
pushSpinner.stop(
`${chalk.green(
'✔'
)} successfully pushed all commits to ${selectedRemote}`
);
pushSpinner.stop(
`${chalk.green('')} Successfully pushed all commits to ${
remotes[0]
}`
);
if (stdout) outro(stdout);
} else {
outro('`git push` aborted');
process.exit(0);
}
} else {
const selectedRemote = (await select({
message: 'Choose a remote to push to',
options: remotes.map((remote) => ({ value: remote, label: remote }))
})) as string;
if (!isCancel(selectedRemote)) {
const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
const { stdout } = await execa('git', ['push', selectedRemote]);
pushSpinner.stop(
`${chalk.green(
'✔'
)} Successfully pushed all commits to ${selectedRemote}`
);
if (stdout) outro(stdout);
} else outro(`${chalk.gray('✖')} process cancelled`);
}
if (stdout) outro(stdout);
} else outro(`${chalk.gray('')} process cancelled`);
}
} catch (error) {
commitSpinner.stop('📝 Commit message generated');
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);
process.exit(1);
}
};
@@ -161,7 +144,7 @@ export async function commit(
if (changedFiles) await gitAdd({ files: changedFiles });
else {
outro('No changes detected, write some code and run `oco` again');
outro('No changes detected, write some code and run `oc` again');
process.exit(1);
}
}


@@ -1,46 +0,0 @@
import chalk from 'chalk';
import { command } from 'cleye';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum';
import { configureCommitlintIntegration } from '../modules/commitlint/config';
import { getCommitlintLLMConfig } from '../modules/commitlint/utils';
export enum CONFIG_MODES {
get = 'get',
force = 'force'
}
export const commitlintConfigCommand = command(
{
name: COMMANDS.commitlint,
parameters: ['<mode>']
},
async (argv) => {
intro('opencommit — configure @commitlint');
try {
const { mode } = argv._;
if (mode === CONFIG_MODES.get) {
const commitLintConfig = await getCommitlintLLMConfig();
outro(commitLintConfig.toString());
return;
}
if (mode === CONFIG_MODES.force) {
await configureCommitlintIntegration(true);
return;
}
throw new Error(
`Unsupported mode: ${mode}. Valid modes are: "force" and "get"`
);
} catch (error) {
outro(`${chalk.red('✖')} ${error}`);
process.exit(1);
}
}
);


@@ -1,32 +1,21 @@
import chalk from 'chalk';
import { command } from 'cleye';
import * as dotenv from 'dotenv';
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { parse as iniParse, stringify as iniStringify } from 'ini';
import { homedir } from 'os';
import { join as pathJoin } from 'path';
import { parse as iniParse, stringify as iniStringify } from 'ini';
import { existsSync, writeFileSync, readFileSync } from 'fs';
import { homedir } from 'os';
import { intro, outro } from '@clack/prompts';
import chalk from 'chalk';
import { COMMANDS } from '../CommandsEnum';
import { getI18nLocal } from '../i18n';
dotenv.config();
export enum CONFIG_KEYS {
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
OCO_OPENAI_MAX_TOKENS = 'OCO_OPENAI_MAX_TOKENS',
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
OCO_DESCRIPTION = 'OCO_DESCRIPTION',
OCO_EMOJI = 'OCO_EMOJI',
OCO_MODEL = 'OCO_MODEL',
OCO_LANGUAGE = 'OCO_LANGUAGE',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE'
OPENAI_API_KEY = 'OPENAI_API_KEY',
OPENAI_BASE_PATH = 'OPENAI_BASE_PATH',
description = 'description',
emoji = 'emoji',
language = 'language'
}
export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;
export enum CONFIG_MODES {
get = 'get',
set = 'set'
@@ -47,25 +36,25 @@ const validateConfig = (
};
export const configValidators = {
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config?: any) {
validateConfig(CONFIG_KEYS.OCO_OPENAI_API_KEY, value, 'Cannot be empty');
[CONFIG_KEYS.OPENAI_API_KEY](value: any) {
validateConfig(CONFIG_KEYS.OPENAI_API_KEY, value, 'Cannot be empty');
validateConfig(
CONFIG_KEYS.OCO_OPENAI_API_KEY,
CONFIG_KEYS.OPENAI_API_KEY,
value.startsWith('sk-'),
'Must start with "sk-"'
);
validateConfig(
CONFIG_KEYS.OCO_OPENAI_API_KEY,
config[CONFIG_KEYS.OCO_OPENAI_BASE_PATH] || value.length === 51,
CONFIG_KEYS.OPENAI_API_KEY,
value.length === 51,
'Must be 51 characters long'
);
return value;
},
[CONFIG_KEYS.OCO_DESCRIPTION](value: any) {
[CONFIG_KEYS.description](value: any) {
validateConfig(
CONFIG_KEYS.OCO_DESCRIPTION,
CONFIG_KEYS.description,
typeof value === 'boolean',
'Must be true or false'
);
@@ -73,28 +62,9 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS](value: any) {
// If the value is a string, convert it to a number.
if (typeof value === 'string') {
value = parseInt(value);
validateConfig(
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
!isNaN(value),
'Must be a number'
);
}
[CONFIG_KEYS.emoji](value: any) {
validateConfig(
CONFIG_KEYS.OCO_OPENAI_MAX_TOKENS,
value ? typeof value === 'number' : undefined,
'Must be a number'
);
return value;
},
[CONFIG_KEYS.OCO_EMOJI](value: any) {
validateConfig(
CONFIG_KEYS.OCO_EMOJI,
CONFIG_KEYS.emoji,
typeof value === 'boolean',
'Must be true or false'
);
@@ -102,53 +72,21 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_LANGUAGE](value: any) {
[CONFIG_KEYS.language](value: any) {
validateConfig(
CONFIG_KEYS.OCO_LANGUAGE,
CONFIG_KEYS.language,
getI18nLocal(value),
`${value} is not supported yet`
);
return getI18nLocal(value);
},
[CONFIG_KEYS.OCO_OPENAI_BASE_PATH](value: any) {
[CONFIG_KEYS.OPENAI_BASE_PATH](value: any) {
validateConfig(
CONFIG_KEYS.OCO_OPENAI_BASE_PATH,
typeof value === 'string',
'Must be string'
CONFIG_KEYS.OPENAI_BASE_PATH,
typeof value == 'string',
`${value} is not supported yet`
);
return value;
},
[CONFIG_KEYS.OCO_MODEL](value: any) {
validateConfig(
CONFIG_KEYS.OCO_MODEL,
[
'gpt-3.5-turbo',
'gpt-4',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613'
].includes(value),
`${value} is not supported yet, use 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'`
);
return value;
},
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER](value: any) {
validateConfig(
CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
value.startsWith('$'),
`${value} must start with $, for example: '$msg'`
);
return value;
},
[CONFIG_KEYS.OCO_PROMPT_MODULE](value: any) {
validateConfig(
CONFIG_KEYS.OCO_PROMPT_MODULE,
['conventional-commit', '@commitlint'].includes(value),
`${value} is not supported yet, use '@commitlint' or 'conventional-commit' (default)`
);
return value;
}
};
@@ -160,52 +98,18 @@ export type ConfigType = {
const configPath = pathJoin(homedir(), '.opencommit');
export const getConfig = (): ConfigType | null => {
const configFromEnv = {
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
OCO_OPENAI_MAX_TOKENS: process.env.OCO_OPENAI_MAX_TOKENS
? Number(process.env.OCO_OPENAI_MAX_TOKENS)
: undefined,
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,
OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false,
OCO_MODEL: process.env.OCO_MODEL || 'gpt-3.5-turbo-16k',
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit'
};
const configExists = existsSync(configPath);
if (!configExists) return configFromEnv;
if (!configExists) return null;
const configFile = readFileSync(configPath, 'utf8');
const config = iniParse(configFile);
for (const configKey of Object.keys(config)) {
if (
!config[configKey] ||
['null', 'undefined'].includes(config[configKey])
) {
config[configKey] = undefined;
continue;
}
try {
const validator = configValidators[configKey as CONFIG_KEYS];
const validValue = validator(
config[configKey] ?? configFromEnv[configKey as CONFIG_KEYS],
config
);
const validValue = configValidators[configKey as CONFIG_KEYS](
config[configKey]
);
config[configKey] = validValue;
} catch (error) {
outro(
`'${configKey}' is not a valid config key: it should be named 'OCO_${configKey.toUpperCase()}', or it does not exist.`
);
outro(
`Manually fix the '.env' file or global '~/.opencommit' config file.`
);
process.exit(1);
}
config[configKey] = validValue;
}
return config;
@@ -234,7 +138,7 @@ export const setConfig = (keyValues: [key: string, value: string][]) => {
writeFileSync(configPath, iniStringify(config), 'utf8');
outro(`${chalk.green('✔')} Config successfully set`);
outro(`${chalk.green('✔')} config successfully set`);
};
export const configCommand = command(
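For orientation, the environment-plus-file resolution that the `getConfig` changes above revolve around can be reduced to a small sketch: `OCO_`-prefixed environment variables act as defaults and the `~/.opencommit` ini file overrides them per key. This is a simplified illustration assuming the `ini` package, not the exact implementation on either side of the diff.

```ts
import { existsSync, readFileSync } from 'fs';
import { parse as iniParse } from 'ini';
import { homedir } from 'os';
import { join as pathJoin } from 'path';

const configPath = pathJoin(homedir(), '.opencommit');

// Minimal sketch: env vars provide defaults, the ini file wins per key.
export const resolveConfig = (): Record<string, unknown> => {
  const fromEnv: Record<string, unknown> = {
    OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
    OCO_EMOJI: process.env.OCO_EMOJI === 'true',
    OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en'
  };

  if (!existsSync(configPath)) return fromEnv;

  const fromFile = iniParse(readFileSync(configPath, 'utf8'));
  return { ...fromEnv, ...fromFile };
};
```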

View File

@@ -1,13 +1,11 @@
import chalk from 'chalk';
import { command } from 'cleye';
import { existsSync } from 'fs';
import fs from 'fs/promises';
import path from 'path';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum.js';
import { command } from 'cleye';
import { assertGitRepo, getCoreHooksPath } from '../utils/git.js';
import { existsSync } from 'fs';
import chalk from 'chalk';
import { intro, outro } from '@clack/prompts';
import { COMMANDS } from '../CommandsEnum.js';
const HOOK_NAME = 'prepare-commit-msg';
const DEFAULT_SYMLINK_URL = path.join('.git', 'hooks', HOOK_NAME);
@@ -94,7 +92,7 @@ export const hookCommand = command(
}
throw new Error(
`Unsupported mode: ${mode}. Supported modes are: 'set' or 'unset'`
`unsupported mode: ${mode}. Supported modes are: 'set' or 'unset'`
);
} catch (error) {
outro(`${chalk.red('✖')} ${error}`);

View File

@@ -1,11 +1,9 @@
import chalk from 'chalk';
import fs from 'fs/promises';
import chalk from 'chalk';
import { intro, outro, spinner } from '@clack/prompts';
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
import { getChangedFiles, getDiff, getStagedFiles, gitAdd } from '../utils/git';
import { getConfig } from './config';
import { generateCommitMessageWithChatCompletion } from '../generateCommitMessageFromGitDiff';
const [messageFilePath, commitSource] = process.argv.slice(2);
@@ -26,7 +24,7 @@ export const prepareCommitMessageHook = async (
if (changedFiles) await gitAdd({ files: changedFiles });
else {
outro('No changes detected, write some code and run `oco` again');
outro('No changes detected, write some code and run `oc` again');
process.exit(1);
}
}
@@ -39,7 +37,7 @@ export const prepareCommitMessageHook = async (
const config = getConfig();
if (!config?.OCO_OPENAI_API_KEY) {
if (!config?.OPENAI_API_KEY) {
throw new Error(
'No OPEN_AI_API exists. Set your OPEN_AI_API=<key> in ~/.opencommit'
);
@@ -47,11 +45,13 @@ export const prepareCommitMessageHook = async (
const spin = spinner();
spin.start('Generating commit message');
const commitMessage = await generateCommitMessageByDiff(
const commitMessage = await generateCommitMessageWithChatCompletion(
await getDiff({ files: staged })
);
spin.stop('Done');
if (typeof commitMessage !== 'string') {
spin.stop('Error');
throw new Error(commitMessage.error);
} else spin.stop('Done');
const fileContent = await fs.readFile(messageFilePath);

View File

@@ -2,20 +2,62 @@ import {
ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { api } from './api';
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
import { getMainCommitPrompt } from './prompts';
import { getConfig } from './commands/config';
import { mergeDiffs } from './utils/mergeDiffs';
import { i18n, I18nLocals } from './i18n';
import { tokenCount } from './utils/tokenCount';
const config = getConfig();
const translation = i18n[(config?.language as I18nLocals) || 'en'];
const generateCommitMessageChatCompletionPrompt = async (
const INIT_MESSAGES_PROMPT: Array<ChatCompletionRequestMessage> = [
{
role: ChatCompletionRequestMessageRoleEnum.System,
// prettier-ignore
content: `You are to act as the author of a commit message in git. Your mission is to create clean and comprehensive commit messages in the conventional commit convention and explain why a change was done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${config?.emoji? 'Use GitMoji convention to preface the commit.': 'Do not preface the commit with anything.'}
${config?.description ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.': "Don't add any descriptions to the commit, only commit message."}
Use the present tense. Lines must not be longer than 74 characters. Use ${translation.localLanguage} to answer.`
},
{
role: ChatCompletionRequestMessageRoleEnum.User,
content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -10,7 +10,7 @@
import {
initWinstonLogger();
const app = express();
-const port = 7799;
+const PORT = 7799;
app.use(express.json());
@@ -34,6 +34,6 @@
app.use((_, res, next) => {
// ROUTES
app.use(PROTECTED_ROUTER_URL, protectedRouter);
-app.listen(port, () => {
- console.log(\`Server listening on port \${port}\`);
+app.listen(process.env.PORT || PORT, () => {
+ console.log(\`Server listening on port \${PORT}\`);
});`
},
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: `${config?.emoji ? '🐛 ' : ''}${translation.commitFix}
${config?.emoji ? '✨ ' : ''}${translation.commitFeat}
${config?.description ? translation.commitDescription : ''}`
}
];
const generateCommitMessageChatCompletionPrompt = (
diff: string
): Promise<Array<ChatCompletionRequestMessage>> => {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
): Array<ChatCompletionRequestMessage> => {
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
chatContextAsCompletionRequest.push({
@@ -32,49 +74,41 @@ export enum GenerateCommitMessageErrorEnum {
emptyMessage = 'EMPTY_MESSAGE'
}
const ADJUSTMENT_FACTOR = 20;
interface GenerateCommitMessageError {
error: GenerateCommitMessageErrorEnum;
}
export const generateCommitMessageByDiff = async (
const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map(
(msg) => tokenCount(msg.content) + 4
).reduce((a, b) => a + b, 0);
const MAX_REQ_TOKENS = 3900 - INIT_MESSAGES_PROMPT_LENGTH;
export const generateCommitMessageWithChatCompletion = async (
diff: string
): Promise<string> => {
): Promise<string | GenerateCommitMessageError> => {
try {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt();
const INIT_MESSAGES_PROMPT_LENGTH = INIT_MESSAGES_PROMPT.map(
(msg) => tokenCount(msg.content) + 4
).reduce((a, b) => a + b, 0);
const MAX_REQUEST_TOKENS =
DEFAULT_MODEL_TOKEN_LIMIT -
ADJUSTMENT_FACTOR -
INIT_MESSAGES_PROMPT_LENGTH -
config?.OCO_OPENAI_MAX_TOKENS;
if (tokenCount(diff) >= MAX_REQUEST_TOKENS) {
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs(
if (tokenCount(diff) >= MAX_REQ_TOKENS) {
const commitMessagePromises = getCommitMsgsPromisesFromFileDiffs(
diff,
MAX_REQUEST_TOKENS
MAX_REQ_TOKENS
);
const commitMessages = [];
for (const promise of commitMessagePromises) {
commitMessages.push(await promise);
await delay(2000);
}
const commitMessages = await Promise.all(commitMessagePromises);
return commitMessages.join('\n\n');
} else {
const messages = generateCommitMessageChatCompletionPrompt(diff);
const commitMessage = await api.generateCommitMessage(messages);
if (!commitMessage)
return { error: GenerateCommitMessageErrorEnum.emptyMessage };
return commitMessage;
}
const messages = await generateCommitMessageChatCompletionPrompt(diff);
const commitMessage = await api.generateCommitMessage(messages);
if (!commitMessage)
throw new Error(GenerateCommitMessageErrorEnum.emptyMessage);
return commitMessage;
} catch (error) {
throw error;
return { error: GenerateCommitMessageErrorEnum.internalError };
}
};
@@ -92,67 +126,25 @@ function getMessagesPromisesByChangesInFile(
maxChangeLength
);
const lineDiffsWithHeader = [];
for (const change of mergedChanges) {
const totalChange = fileHeader + change;
if (tokenCount(totalChange) > maxChangeLength) {
// If the totalChange is too large, split it into smaller pieces
const splitChanges = splitDiff(totalChange, maxChangeLength);
lineDiffsWithHeader.push(...splitChanges);
} else {
lineDiffsWithHeader.push(totalChange);
}
}
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
async (lineDiff) => {
const messages = await generateCommitMessageChatCompletionPrompt(
separator + lineDiff
);
return api.generateCommitMessage(messages);
}
const lineDiffsWithHeader = mergedChanges.map(
(change) => fileHeader + change
);
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map((lineDiff) => {
const messages = generateCommitMessageChatCompletionPrompt(
separator + lineDiff
);
return api.generateCommitMessage(messages);
});
return commitMsgsFromFileLineDiffs;
}
function splitDiff(diff: string, maxChangeLength: number) {
const lines = diff.split('\n');
const splitDiffs = [];
let currentDiff = '';
for (let line of lines) {
// If a single line exceeds maxChangeLength, split it into multiple lines
while (tokenCount(line) > maxChangeLength) {
const subLine = line.substring(0, maxChangeLength);
line = line.substring(maxChangeLength);
splitDiffs.push(subLine);
}
// Check the tokenCount of the currentDiff and the line separately
if (tokenCount(currentDiff) + tokenCount('\n' + line) > maxChangeLength) {
// If adding the next line would exceed the maxChangeLength, start a new diff
splitDiffs.push(currentDiff);
currentDiff = line;
} else {
// Otherwise, add the line to the current diff
currentDiff += '\n' + line;
}
}
// Add the last diff
if (currentDiff) {
splitDiffs.push(currentDiff);
}
return splitDiffs;
}
export const getCommitMsgsPromisesFromFileDiffs = async (
export function getCommitMsgsPromisesFromFileDiffs(
diff: string,
maxDiffLength: number
) => {
) {
const separator = 'diff --git ';
const diffByFiles = diff.split(separator).slice(1);
@@ -173,17 +165,12 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
commitMessagePromises.push(...messagesPromises);
} else {
const messages = await generateCommitMessageChatCompletionPrompt(
const messages = generateCommitMessageChatCompletionPrompt(
separator + fileDiff
);
commitMessagePromises.push(api.generateCommitMessage(messages));
}
}
return commitMessagePromises;
};
function delay(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
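The deciding arithmetic in the branch above: the per-request budget for the diff is the model context window minus a safety margin, the prompt tokens, and the completion allowance. A hedged sketch of that calculation, with names mirroring the diff:

```ts
// Sketch of the token budget used to decide whether a diff must be split.
const DEFAULT_MODEL_TOKEN_LIMIT = 4096; // model context window
const ADJUSTMENT_FACTOR = 20; // safety margin

const maxRequestTokens = (promptTokens: number, maxCompletionTokens: number): number =>
  DEFAULT_MODEL_TOKEN_LIMIT - ADJUSTMENT_FACTOR - promptTokens - maxCompletionTokens;

// Example: with a 600-token prompt and 500 tokens reserved for the answer,
// 4096 - 20 - 600 - 500 = 2976 tokens remain for the diff itself.
// A larger diff is split per file (and per change block) before being sent.
```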

View File

@@ -1,228 +0,0 @@
import { unlinkSync, writeFileSync } from 'fs';
import core from '@actions/core';
import exec from '@actions/exec';
import github from '@actions/github';
import { intro, outro } from '@clack/prompts';
import { PushEvent } from '@octokit/webhooks-types';
import { generateCommitMessageByDiff } from './generateCommitMessageFromGitDiff';
import { randomIntFromInterval } from './utils/randomIntFromInterval';
import { sleep } from './utils/sleep';
// This should be a token with access to your repository scoped in as a secret.
// The YML workflow will need to set GITHUB_TOKEN with the GitHub Secret Token
// GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
// https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token#about-the-github_token-secret
const GITHUB_TOKEN = core.getInput('GITHUB_TOKEN');
const octokit = github.getOctokit(GITHUB_TOKEN);
const context = github.context;
const owner = context.repo.owner;
const repo = context.repo.repo;
type SHA = string;
type Diff = string;
async function getCommitDiff(commitSha: string) {
const diffResponse = await octokit.request<string>(
'GET /repos/{owner}/{repo}/commits/{ref}',
{
owner,
repo,
ref: commitSha,
headers: {
Accept: 'application/vnd.github.v3.diff'
}
}
);
return { sha: commitSha, diff: diffResponse.data };
}
interface DiffAndSHA {
sha: SHA;
diff: Diff;
}
interface MsgAndSHA {
sha: SHA;
msg: string;
}
// send diffs in chunks of 3-4 at a time,
// because OpenAI rejects "too many requests" at once with a 429 error
async function improveMessagesInChunks(diffsAndSHAs: DiffAndSHA[]) {
const chunkSize = diffsAndSHAs!.length % 2 === 0 ? 4 : 3;
outro(`Improving commit messages in chunks of ${chunkSize}.`);
const improvePromises = diffsAndSHAs!.map((commit) =>
generateCommitMessageByDiff(commit.diff)
);
let improvedMessagesAndSHAs: MsgAndSHA[] = [];
for (let step = 0; step < improvePromises.length; step += chunkSize) {
const chunkOfPromises = improvePromises.slice(step, step + chunkSize);
try {
const chunkOfImprovedMessages = await Promise.all(chunkOfPromises);
const chunkOfImprovedMessagesBySha = chunkOfImprovedMessages.map(
(improvedMsg, i) => {
const index = improvedMessagesAndSHAs.length;
const sha = diffsAndSHAs![index + i].sha;
return { sha, msg: improvedMsg };
}
);
improvedMessagesAndSHAs.push(...chunkOfImprovedMessagesBySha);
// sometimes OpenAI errors with a 429 code (too many requests),
// so let's sleep a bit
const sleepFor =
1000 * randomIntFromInterval(1, 5) + 100 * randomIntFromInterval(1, 5);
outro(
`Improved ${chunkOfPromises.length} messages. Sleeping for ${sleepFor}`
);
await sleep(sleepFor);
} catch (error) {
outro(error as string);
// if sleeping in the try block still fails with 429,
// OpenAI wants at least 1 minute before the next request
const sleepFor = 60000 + 1000 * randomIntFromInterval(1, 5);
outro(`Retrying after sleeping for ${sleepFor}`);
await sleep(sleepFor);
// go to previous step
step -= chunkSize;
}
}
return improvedMessagesAndSHAs;
}
const getDiffsBySHAs = async (SHAs: string[]) => {
const diffPromises = SHAs.map((sha) => getCommitDiff(sha));
const diffs = await Promise.all(diffPromises).catch((error) => {
outro(`Error in Promise.all(getCommitDiffs(SHAs)): ${error}.`);
throw error;
});
return diffs;
};
async function improveCommitMessages(
commitsToImprove: { id: string; message: string }[]
): Promise<void> {
if (commitsToImprove.length) {
outro(`Found ${commitsToImprove.length} commits to improve.`);
} else {
outro('No new commits found.');
return;
}
outro('Fetching commit diffs by SHAs.');
const commitSHAsToImprove = commitsToImprove.map((commit) => commit.id);
const diffsWithSHAs = await getDiffsBySHAs(commitSHAsToImprove);
outro('Done.');
const improvedMessagesWithSHAs = await improveMessagesInChunks(diffsWithSHAs);
console.log(
`Improved ${improvedMessagesWithSHAs.length} commits: `,
improvedMessagesWithSHAs
);
// Check if there are actually any changes in the commit messages
const messagesChanged = improvedMessagesWithSHAs.some(
({ sha, msg }, index) => msg !== commitsToImprove[index].message
);
if (!messagesChanged) {
console.log('No changes in commit messages detected, skipping rebase');
return;
}
const createCommitMessageFile = (message: string, index: number) =>
writeFileSync(`./commit-${index}.txt`, message);
improvedMessagesWithSHAs.forEach(({ msg }, i) =>
createCommitMessageFile(msg, i)
);
writeFileSync(`./count.txt`, '0');
writeFileSync(
'./rebase-exec.sh',
`#!/bin/bash
count=$(cat count.txt)
git commit --amend -F commit-$count.txt
echo $(( count + 1 )) > count.txt`
);
await exec.exec(`chmod +x ./rebase-exec.sh`);
await exec.exec(
'git',
['rebase', `${commitsToImprove[0].id}^`, '--exec', './rebase-exec.sh'],
{
env: {
GIT_SEQUENCE_EDITOR: 'sed -i -e "s/^pick/reword/g"',
GIT_COMMITTER_NAME: process.env.GITHUB_ACTOR!,
GIT_COMMITTER_EMAIL: `${process.env.GITHUB_ACTOR}@users.noreply.github.com`
}
}
);
const deleteCommitMessageFile = (index: number) =>
unlinkSync(`./commit-${index}.txt`);
commitsToImprove.forEach((_commit, i) => deleteCommitMessageFile(i));
unlinkSync('./count.txt');
unlinkSync('./rebase-exec.sh');
outro('Force pushing non-interactively rebased commits into remote.');
await exec.exec('git', ['status']);
// Force push the rebased commits
await exec.exec('git', ['push', `--force`]);
outro('Done 🧙');
}
async function run() {
intro('OpenCommit — improving lame commit messages');
try {
if (github.context.eventName === 'push') {
outro(`Processing commits in a Push event`);
const payload = github.context.payload as PushEvent;
const commits = payload.commits;
// Set local Git user identity for future git history manipulations
if (payload.pusher.email)
await exec.exec('git', ['config', 'user.email', payload.pusher.email]);
await exec.exec('git', ['config', 'user.name', payload.pusher.name]);
await exec.exec('git', ['status']);
await exec.exec('git', ['log', '--oneline']);
await improveCommitMessages(commits);
} else {
outro('Wrong action.');
core.error(
`OpenCommit was called on ${github.context.payload.action}. OpenCommit is supposed to be used on the "push" event.`
);
}
} catch (error: any) {
const err = error?.message || error;
core.setFailed(err);
}
}
run();
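The action above throttles itself around OpenAI's 429 rate limit: it works in chunks of 3-4, sleeps a randomized interval between chunks, and on failure waits at least a minute and retries the same chunk. A minimal generic sketch of that loop (the `jobs` array stands in for the per-commit promises):

```ts
// Minimal sketch of chunked processing with retry-on-429, as used above.
const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
const randomIntFromInterval = (min: number, max: number) =>
  Math.floor(Math.random() * (max - min + 1) + min);

async function processInChunks<T>(
  jobs: Array<() => Promise<T>>,
  chunkSize = 3
): Promise<T[]> {
  const results: T[] = [];
  for (let step = 0; step < jobs.length; step += chunkSize) {
    const chunk = jobs.slice(step, step + chunkSize);
    try {
      results.push(...(await Promise.all(chunk.map((job) => job()))));
      // brief randomized pause to stay under the rate limit
      await sleep(1000 * randomIntFromInterval(1, 5));
    } catch {
      // on 429, wait at least a minute, then retry the same chunk
      await sleep(60_000 + 1000 * randomIntFromInterval(1, 5));
      step -= chunkSize;
    }
  }
  return results;
}
```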

View File

@@ -1,6 +1,6 @@
{
"localLanguage": "english",
"commitFix": "fix(server.ts): change port variable case from lowercase port to uppercase PORT to improve semantics",
"commitFeat": "feat(server.ts): add support for process.env.PORT environment variable to be able to run app on a configurable port",
"commitFix": "fix(server.ts): change port variable case from lowercase port to uppercase PORT",
"commitFeat": "feat(server.ts): add support for process.env.PORT environment variable",
"commitDescription": "The port variable is now named PORT, which improves consistency with the naming conventions as PORT is a constant. Support for an environment variable allows the application to be more flexible as it can now run on any available port specified via the process.env.PORT environment variable."
}

View File

@@ -1,22 +1,22 @@
import cs from '../i18n/cs.json';
import de from '../i18n/de.json';
import en from '../i18n/en.json';
import es_ES from '../i18n/es_ES.json';
import fr from '../i18n/fr.json';
import id_ID from '../i18n/id_ID.json';
import it from '../i18n/it.json';
import ja from '../i18n/ja.json';
import ko from '../i18n/ko.json';
import nl from '../i18n/nl.json';
import pl from '../i18n/pl.json';
import pt_br from '../i18n/pt_br.json';
import ru from '../i18n/ru.json';
import sv from '../i18n/sv.json';
import th from '../i18n/th.json';
import tr from '../i18n/tr.json';
import vi_VN from '../i18n/vi_VN.json';
import zh_CN from '../i18n/zh_CN.json';
import zh_TW from '../i18n/zh_TW.json';
import en from '../i18n/en.json' assert { type: 'json' };
import cs from '../i18n/cs.json' assert { type: 'json' };
import de from '../i18n/de.json' assert { type: 'json' };
import fr from '../i18n/fr.json' assert { type: 'json' };
import it from '../i18n/it.json' assert { type: 'json' };
import ko from '../i18n/ko.json' assert { type: 'json' };
import zh_CN from '../i18n/zh_CN.json' assert { type: 'json' };
import zh_TW from '../i18n/zh_TW.json' assert { type: 'json' };
import ja from '../i18n/ja.json' assert { type: 'json' };
import pt_br from '../i18n/pt_br.json' assert { type: 'json' };
import vi_VN from '../i18n/vi_VN.json' assert { type: 'json' };
import es_ES from '../i18n/es_ES.json' assert { type: 'json' };
import sv from '../i18n/sv.json' assert { type: 'json' };
import nl from '../i18n/nl.json' assert { type: 'json' };
import ru from '../i18n/ru.json' assert { type: 'json' };
import id_ID from '../i18n/id_ID.json' assert { type: 'json' };
import pl from '../i18n/pl.json' assert { type: 'json' };
import tr from '../i18n/tr.json' assert { type: 'json' };
import th from '../i18n/th.json' assert { type: 'json' };
export enum I18nLocals {
'en' = 'en',
@@ -36,7 +36,7 @@ export enum I18nLocals {
'id_ID' = 'id_ID',
'pl' = 'pl',
'tr' = 'tr',
'th' = 'th'
'th' = 'th',
}
export const i18n = {

View File

@@ -1,79 +0,0 @@
import { spinner } from '@clack/prompts';
import { api } from '../../api';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
import { computeHash } from './crypto';
import { commitlintPrompts, inferPromptsFromCommitlintConfig } from './prompts';
import { getCommitLintPWDConfig } from './pwd-commitlint';
import { CommitlintLLMConfig } from './types';
import * as utils from './utils';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
export const configureCommitlintIntegration = async (force = false) => {
const spin = spinner();
spin.start('Loading @commitlint configuration');
const fileExists = await utils.commitlintLLMConfigExists();
let commitLintConfig = await getCommitLintPWDConfig();
// debug complete @commitlint configuration
// await fs.writeFile(
// `${OPENCOMMIT_COMMITLINT_CONFIG}-commitlint-debug`,
// JSON.stringify(commitLintConfig, null, 2)
// );
const hash = await computeHash(JSON.stringify(commitLintConfig));
spin.stop(`Read @commitlint configuration (hash: ${hash})`);
if (fileExists) {
// Check if we need to update the prompts.
const { hash: existingHash } = await utils.getCommitlintLLMConfig();
if (hash === existingHash && !force) {
spin.stop(
'Hashes are the same, no need to update the config. Run "force" command to bypass.'
);
return;
}
}
spin.start('Generating consistency with given @commitlint rules');
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig);
const consistencyPrompts =
commitlintPrompts.GEN_COMMITLINT_CONSISTENCY_PROMPT(prompts);
// debug prompt which will generate a consistency
// await fs.writeFile(
// `${COMMITLINT_LLM_CONFIG}-debug`,
// consistencyPrompts.map((p) => p.content)
// );
let consistency =
(await api.generateCommitMessage(consistencyPrompts)) || '{}';
// Clean up the consistency answer. Sometimes 'gpt-3.5-turbo' sends the rules back.
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));
// ... what remains might be an extra set of "\n"
consistency = utils.removeDoubleNewlines(consistency);
const commitlintLLMConfig: CommitlintLLMConfig = {
hash,
prompts,
consistency: {
[translation.localLanguage]: {
...JSON.parse(consistency as string)
}
}
};
await utils.writeCommitlintLLMConfig(commitlintLLMConfig);
spin.stop(`Done - please review contents of ${COMMITLINT_LLM_CONFIG_PATH}`);
};

View File

@@ -1 +0,0 @@
export const COMMITLINT_LLM_CONFIG_PATH = `${process.env.PWD}/.opencommit-commitlint`;

View File

@@ -1,15 +0,0 @@
import crypto from 'crypto';
export const computeHash = async (
content: string,
algorithm: string = 'sha256'
): Promise<string> => {
try {
const hash = crypto.createHash(algorithm);
hash.update(content);
return hash.digest('hex');
} catch (error) {
console.error('Error while computing hash:', error);
throw error;
}
};
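A brief usage sketch of the hash above: it fingerprints the resolved @commitlint configuration so the cached prompts are only regenerated when that configuration changes. Names here are hypothetical, not part of the file:

```ts
// Hypothetical helper mirroring how the commitlint integration uses computeHash.
const hasConfigChanged = async (
  commitlintConfig: unknown,
  cachedHash: string
): Promise<boolean> => {
  const currentHash = await computeHash(JSON.stringify(commitlintConfig));
  return currentHash !== cachedHash;
};
```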

View File

@@ -1,283 +0,0 @@
import chalk from 'chalk';
import {
ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { outro } from '@clack/prompts';
import {
PromptConfig,
QualifiedConfig,
RuleConfigSeverity,
RuleConfigTuple
} from '@commitlint/types';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { IDENTITY, INIT_DIFF_PROMPT } from '../../prompts';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
type DeepPartial<T> = {
[P in keyof T]?: {
[K in keyof T[P]]?: T[P][K];
};
};
type PromptFunction = (
applicable: string,
value: any,
prompt: DeepPartial<PromptConfig>
) => string;
type PromptResolverFunction = (
key: string,
applicable: string,
value: any,
prompt?: DeepPartial<PromptConfig>
) => string;
/**
* Extracts more context for each type-enum.
* IDEA: replicate the concept for scopes and refactor to a generic feature.
*/
const getTypeRuleExtraDescription = (
type: string,
prompt?: DeepPartial<PromptConfig>
) => prompt?.questions?.type?.enum?.[type]?.description;
/*
IDEA: Compress the LLM-readable prompt for each section of the commit message: one line for the header, one line for the scope, etc.
- The type must be in lowercase and should be one of the following values: featuring, fixing, documenting, styling, refactoring, testing, chores, perf, build, ci, revert.
- The scope should not be empty and provide context for the change (e.g., module or file changed).
- The subject should not be empty, should not end with a period, and should provide a concise description of the change. It should not be in sentence-case, start-case, pascal-case, or upper-case.
*/
const llmReadableRules: {
[ruleName: string]: PromptResolverFunction;
} = {
blankline: (key, applicable) =>
`There should ${applicable} be a blank line at the beginning of the ${key}.`,
caseRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be in ${
Array.isArray(value)
? `one of the following case:
- ${value.join('\n - ')}.`
: `${value} case.`
}`,
emptyRule: (key, applicable) => `The ${key} should ${applicable} be empty.`,
enumRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be one of the following values:
- ${Array.isArray(value) ? value.join('\n - ') : value}.`,
enumTypeRule: (key, applicable, value: string | Array<string>, prompt) =>
`The ${key} should ${applicable} be one of the following values:
- ${
Array.isArray(value)
? value
.map((v) => {
const description = getTypeRuleExtraDescription(v, prompt);
if (description) {
return `${v} (${description})`;
} else return v;
})
.join('\n - ')
: value
}.`,
fullStopRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} end with '${value}'.`,
maxLengthRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} have ${value} characters or less.`,
minLengthRule: (key, applicable, value: string) =>
`The ${key} should ${applicable} have ${value} characters or more.`
};
/**
* TODO: Add validation for every rule in the @commitlint configuration.
* IDEA: Plugins can extend the list of rules. Provide the user with a way to infer or extend when there is "No prompt handler for rule".
*/
const rulesPrompts: {
[ruleName: string]: PromptFunction;
} = {
'body-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('body', applicable, value),
'body-empty': (applicable: string) =>
llmReadableRules.emptyRule('body', applicable, undefined),
'body-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('body', applicable, value),
'body-leading-blank': (applicable: string) =>
llmReadableRules.blankline('body', applicable, undefined),
'body-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('body', applicable, value),
'body-max-line-length': (applicable: string, value: string) =>
`Each line of the body should ${applicable} have ${value} characters or less.`,
'body-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('body', applicable, value),
'footer-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('footer', applicable, value),
'footer-empty': (applicable: string) =>
llmReadableRules.emptyRule('footer', applicable, undefined),
'footer-leading-blank': (applicable: string) =>
llmReadableRules.blankline('footer', applicable, undefined),
'footer-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('footer', applicable, value),
'footer-max-line-length': (applicable: string, value: string) =>
`Each line of the footer should ${applicable} have ${value} characters or less.`,
'footer-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('footer', applicable, value),
'header-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('header', applicable, value),
'header-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('header', applicable, value),
'header-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('header', applicable, value),
'header-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('header', applicable, value),
'references-empty': (applicable: string) =>
llmReadableRules.emptyRule('references section', applicable, undefined),
'scope-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('scope', applicable, value),
'scope-empty': (applicable: string) =>
llmReadableRules.emptyRule('scope', applicable, undefined),
'scope-enum': (applicable: string, value: string | Array<string>) =>
llmReadableRules.enumRule('type', applicable, value),
'scope-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('scope', applicable, value),
'scope-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('scope', applicable, value),
'signed-off-by': (applicable: string, value: string) =>
`The commit message should ${applicable} have a "Signed-off-by" line with the value "${value}".`,
'subject-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('subject', applicable, value),
'subject-empty': (applicable: string) =>
llmReadableRules.emptyRule('subject', applicable, undefined),
'subject-full-stop': (applicable: string, value: string) =>
llmReadableRules.fullStopRule('subject', applicable, value),
'subject-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('subject', applicable, value),
'subject-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('subject', applicable, value),
'type-case': (applicable: string, value: string | Array<string>) =>
llmReadableRules.caseRule('type', applicable, value),
'type-empty': (applicable: string) =>
llmReadableRules.emptyRule('type', applicable, undefined),
'type-enum': (applicable: string, value: string | Array<string>, prompt) =>
llmReadableRules.enumTypeRule('type', applicable, value, prompt),
'type-max-length': (applicable: string, value: string) =>
llmReadableRules.maxLengthRule('type', applicable, value),
'type-min-length': (applicable: string, value: string) =>
llmReadableRules.minLengthRule('type', applicable, value)
};
const getPrompt = (
ruleName: string,
ruleConfig: RuleConfigTuple<unknown>,
prompt: DeepPartial<PromptConfig>
) => {
const [severity, applicable, value] = ruleConfig;
// Should we exclude "Disabled" properties?
// Is this used to disable an underlying rule when extending presets?
if (severity === RuleConfigSeverity.Disabled) return null;
const promptFn = rulesPrompts[ruleName];
if (promptFn) {
return promptFn(applicable, value, prompt);
}
// Plugins may add their custom rules.
// We might want to call OpenAI to build this rule's llm-readable prompt.
outro(`${chalk.red('✖')} No prompt handler for rule "${ruleName}".`);
return `Please manually set the prompt for rule "${ruleName}".`;
};
export const inferPromptsFromCommitlintConfig = (
config: QualifiedConfig
): string[] => {
const { rules, prompt } = config;
if (!rules) return [];
return Object.keys(rules)
.map((ruleName) =>
getPrompt(ruleName, rules[ruleName] as RuleConfigTuple<unknown>, prompt)
)
.filter((prompt) => prompt !== null) as string[];
};
/**
* Breaks down the commit message structure for conventional commits and maps the parts to
* the ubiquitous language used by @commitlint.
* While gpt-4 does this on its own, gpt-3.5 can't map this by itself at the moment.
*/
const STRUCTURE_OF_COMMIT = `
- Header of commit is composed of type, scope, subject: <type-of-commit>(<scope-of-commit>): <subject-of-commit>
- Description of commit is composed of body and footer (optional): <body-of-commit>\n<footer(s)-of-commit>`;
// Prompt to generate LLM-readable rules based on @commitlint rules.
const GEN_COMMITLINT_CONSISTENCY_PROMPT = (
prompts: string[]
): ChatCompletionRequestMessage[] => [
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
// prettier-ignore
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
Here are the specific requirements and conventions that should be strictly followed:
Commit Message Conventions:
- The commit message consists of three parts: Header, Body, and Footer.
- Header:
- Format: \`<type>(<scope>): <subject>\`
- ${prompts.join('\n- ')}
JSON Output Format:
- The JSON output should contain the commit messages for a bug fix and a new feature in the following format:
\`\`\`json
{
"localLanguage": "${translation.localLanguage}",
"commitFix": "<Header of commit for bug fix>",
"commitFeat": "<Header of commit for feature>",
"commitDescription": "<Description of commit for both the bug fix and the feature>"
}
\`\`\`
- The "commitDescription" should not include the commit messages header, only the description.
- Description should not be more than 74 characters.
Additional Details:
- Changing the variable 'port' to uppercase 'PORT' is considered a bug fix.
- Allowing the server to listen on a port specified through the environment variable is considered a new feature.
Example Git Diff is to follow:`
},
INIT_DIFF_PROMPT
];
/**
* Prompt to have LLM generate a message using @commitlint rules.
*
* @param language
* @param prompts
* @returns
*/
const INIT_MAIN_PROMPT = (
language: string,
prompts: string[]
): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.System,
// prettier-ignore
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.' : 'Do not preface the commit with anything.'}
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.' : "Don't add any descriptions to the commit, only commit message."}
Use the present tense. Use ${language} to answer.
You will strictly follow the following conventions to generate the content of the commit message:
- ${prompts.join('\n- ')}
The conventions refer to the following structure of the commit message:
${STRUCTURE_OF_COMMIT}
`
});
export const commitlintPrompts = {
INIT_MAIN_PROMPT,
GEN_COMMITLINT_CONSISTENCY_PROMPT
};
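For a concrete sense of the mapping above, one active rule tuple becomes one LLM-readable sentence. A hypothetical call (types loosened for brevity):

```ts
// [severity, applicable, value]; severity 2 means "error", so the rule is active.
const rules = { 'header-max-length': [2, 'always', 100] };

inferPromptsFromCommitlintConfig({ rules } as any);
// => ['The header should always have 100 characters or less.']
```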

View File

@@ -1,25 +0,0 @@
import path from 'path';
const nodeModulesPath = path.join(
process.env.PWD || process.cwd(),
'node_modules',
'@commitlint',
'load'
);
/**
* Loads the configuration for the `@commitlint` package from the current working
* directory (`process.env.PWD`) by requiring its `load` module.
*
* @returns
*/
export const getCommitLintPWDConfig = async () => {
const load = require(nodeModulesPath).default;
if (load && typeof load === 'function') {
return await load();
}
// @commitlint/load is not a function
return null;
};
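A usage sketch, assuming @commitlint is installed in the host project: the loaded config is the fully resolved @commitlint setup whose `rules` feed `inferPromptsFromCommitlintConfig` from the prompts module above.

```ts
// Hypothetical call site, mirroring how the integration consumes the loaded config.
const commitlintConfig = await getCommitLintPWDConfig();
if (commitlintConfig) {
  const prompts = inferPromptsFromCommitlintConfig(commitlintConfig);
  // prompts: one LLM-readable sentence per active rule
}
```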

View File

@@ -1,11 +0,0 @@
import { i18n } from '../../i18n';
export type ConsistencyPrompt = (typeof i18n)[keyof typeof i18n];
export type CommitlintLLMConfig = {
hash: string;
prompts: string[];
consistency: {
[key: string]: ConsistencyPrompt;
};
};

View File

@@ -1,47 +0,0 @@
import fs from 'fs/promises';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
import { CommitlintLLMConfig } from './types';
/**
* Removes a "\n" only when it occurs twice in a row
*/
export const removeDoubleNewlines = (input: string): string => {
const pattern = /\\n\\n/g;
if (pattern.test(input)) {
const newInput = input.replace(pattern, '');
return removeDoubleNewlines(newInput);
}
return input;
};
export const commitlintLLMConfigExists = async (): Promise<boolean> => {
let exists;
try {
await fs.access(COMMITLINT_LLM_CONFIG_PATH);
exists = true;
} catch (e) {
exists = false;
}
return exists;
};
export const writeCommitlintLLMConfig = async (
commitlintLLMConfig: CommitlintLLMConfig
): Promise<void> => {
await fs.writeFile(
COMMITLINT_LLM_CONFIG_PATH,
JSON.stringify(commitlintLLMConfig, null, 2)
);
};
export const getCommitlintLLMConfig =
async (): Promise<CommitlintLLMConfig> => {
const content = await fs.readFile(COMMITLINT_LLM_CONFIG_PATH);
const commitLintLLMConfig = JSON.parse(
content.toString()
) as CommitlintLLMConfig;
return commitLintLLMConfig;
};
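The doubled backslashes in the pattern mean the function strips literal `\n\n` character pairs that the model sometimes embeds in its JSON answer, not real newlines. A quick illustration:

```ts
removeDoubleNewlines('{"commitFix": "fix: ..."}\\n\\n\\n\\n');
// => '{"commitFix": "fix: ..."}' ; both literal "\n\n" pairs are stripped recursively
```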

View File

@@ -1,110 +0,0 @@
import {
ChatCompletionRequestMessage,
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { note } from '@clack/prompts';
import { getConfig } from './commands/config';
import { i18n, I18nLocals } from './i18n';
import { configureCommitlintIntegration } from './modules/commitlint/config';
import { commitlintPrompts } from './modules/commitlint/prompts';
import { ConsistencyPrompt } from './modules/commitlint/types';
import * as utils from './modules/commitlint/utils';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
export const IDENTITY =
'You are to act as the author of a commit message in git.';
const INIT_MAIN_PROMPT = (language: string): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.System,
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages as per the conventional commit convention and explain WHAT were the changes and mainly WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you are to convert it into a commit message.
${
config?.OCO_EMOJI
? 'Use GitMoji convention to preface the commit.'
: 'Do not preface the commit with anything.'
}
${
config?.OCO_DESCRIPTION
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
: "Don't add any descriptions to the commit, only commit message."
}
Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`
});
export const INIT_DIFF_PROMPT: ChatCompletionRequestMessage = {
role: ChatCompletionRequestMessageRoleEnum.User,
content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -10,7 +10,7 @@
import {
initWinstonLogger();
const app = express();
-const port = 7799;
+const PORT = 7799;
app.use(express.json());
@@ -34,6 +34,6 @@
app.use((_, res, next) => {
// ROUTES
app.use(PROTECTED_ROUTER_URL, protectedRouter);
-app.listen(port, () => {
- console.log(\`Server listening on port \${port}\`);
+app.listen(process.env.PORT || PORT, () => {
+ console.log(\`Server listening on port \${PORT}\`);
});`
};
const INIT_CONSISTENCY_PROMPT = (
translation: ConsistencyPrompt
): ChatCompletionRequestMessage => ({
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: `${config?.OCO_EMOJI ? '🐛 ' : ''}${translation.commitFix}
${config?.OCO_EMOJI ? '✨ ' : ''}${translation.commitFeat}
${config?.OCO_DESCRIPTION ? translation.commitDescription : ''}`
});
export const getMainCommitPrompt = async (): Promise<
ChatCompletionRequestMessage[]
> => {
switch (config?.OCO_PROMPT_MODULE) {
case '@commitlint':
if (!(await utils.commitlintLLMConfigExists())) {
note(
`OCO_PROMPT_MODULE is @commitlint but you haven't generated consistency for this project yet.`
);
await configureCommitlintIntegration();
}
// Replace example prompt with a prompt that's generated by OpenAI for the commitlint config.
const commitLintConfig = await utils.getCommitlintLLMConfig();
return [
commitlintPrompts.INIT_MAIN_PROMPT(
translation.localLanguage,
commitLintConfig.prompts
),
INIT_DIFF_PROMPT,
INIT_CONSISTENCY_PROMPT(
commitLintConfig.consistency[
translation.localLanguage
] as ConsistencyPrompt
)
];
default:
// conventional-commit
return [
INIT_MAIN_PROMPT(translation.localLanguage),
INIT_DIFF_PROMPT,
INIT_CONSISTENCY_PROMPT(translation)
];
}
};
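In the pre-revert code earlier in this diff, the caller appends the actual staged diff as a user message to whatever prompt set `getMainCommitPrompt` returns (conventional-commit by default, @commitlint when configured). A condensed sketch reusing the imports above:

```ts
// Condensed sketch of how these prompts are consumed; see
// generateCommitMessageChatCompletionPrompt earlier in this diff.
const buildMessages = async (
  diff: string
): Promise<ChatCompletionRequestMessage[]> => {
  const initMessages = await getMainCommitPrompt();
  return [
    ...initMessages,
    { role: ChatCompletionRequestMessageRoleEnum.User, content: diff }
  ];
};
```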

View File

@@ -1,9 +1,6 @@
import chalk from 'chalk';
import { outro } from '@clack/prompts';
import currentPackage from '../../package.json';
import { getOpenCommitLatestVersion } from '../api';
import currentPackage from '../../package.json' assert { type: 'json' };
import chalk from 'chalk';
export const checkIsLatestVersion = async () => {
const latestVersion = await getOpenCommitLatestVersion();
@@ -12,7 +9,7 @@ export const checkIsLatestVersion = async () => {
const currentVersion = currentPackage.version;
if (currentVersion !== latestVersion) {
outro(
console.warn(
chalk.yellow(
`
You are not using the latest stable version of OpenCommit with new features and bug fixes.

View File

@@ -1,9 +1,8 @@
import { execa } from 'execa';
import { outro, spinner } from '@clack/prompts';
import { readFileSync } from 'fs';
import ignore, { Ignore } from 'ignore';
import { outro, spinner } from '@clack/prompts';
export const assertGitRepo = async () => {
try {
await execa('git', ['rev-parse']);
@@ -26,11 +25,13 @@ export const getOpenCommitIgnore = (): Ignore => {
return ig;
};
export const getCoreHooksPath = async (): Promise<string> => {
const { stdout } = await execa('git', ['config', 'core.hooksPath']);
export const getCoreHooksPath = async(): Promise<string> => {
const { stdout } = await execa('git', [
'config',
'core.hooksPath']);
return stdout;
};
}
export const getStagedFiles = async (): Promise<string[]> => {
const { stdout: gitDir } = await execa('git', [
@@ -82,20 +83,12 @@ export const gitAdd = async ({ files }: { files: string[] }) => {
export const getDiff = async ({ files }: { files: string[] }) => {
const lockFiles = files.filter(
(file) =>
file.includes('.lock') ||
file.includes('-lock.') ||
file.includes('.svg') ||
file.includes('.png') ||
file.includes('.jpg') ||
file.includes('.jpeg') ||
file.includes('.webp') ||
file.includes('.gif')
(file) => file.includes('.lock') || file.includes('-lock.')
);
if (lockFiles.length) {
outro(
`Some files are excluded by default from 'git diff'. No commit messages are generated for these files:\n${lockFiles.join(
`Some files are '.lock' files which are excluded by default from 'git diff'. No commit messages are generated for these files:\n${lockFiles.join(
'\n'
)}`
);
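The `Ignore` instance returned by `getOpenCommitIgnore` above is what keeps user-excluded paths out of the diff. A hedged sketch of how such filtering can be wired with the `ignore` package, assuming a project-level `.opencommitignore` file (the exact file handling sits outside this hunk):

```ts
import { existsSync, readFileSync } from 'fs';
import ignore, { Ignore } from 'ignore';

// Sketch: build an Ignore matcher from a project-level ignore file
// and drop matching paths before diffing them.
const getIgnoreMatcher = (): Ignore => {
  const ig = ignore();
  if (existsSync('.opencommitignore')) {
    ig.add(readFileSync('.opencommitignore', 'utf8').split('\n'));
  }
  return ig;
};

const filterIgnored = (files: string[]): string[] => {
  const ig = getIgnoreMatcher();
  return files.filter((file) => !ig.ignores(file));
};
```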

View File

@@ -1,5 +1,4 @@
import { tokenCount } from './tokenCount';
export function mergeDiffs(arr: string[], maxStringLength: number): string[] {
const mergedArr: string[] = [];
let currentItem: string = arr[0];

View File

@@ -1,4 +0,0 @@
export function randomIntFromInterval(min: number, max: number) {
// min and max included
return Math.floor(Math.random() * (max - min + 1) + min);
}

View File

@@ -1,3 +0,0 @@
export function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}

View File

@@ -1,5 +1,5 @@
import cl100k_base from '@dqbd/tiktoken/encoders/cl100k_base.json';
import { Tiktoken } from '@dqbd/tiktoken/lite';
import cl100k_base from '@dqbd/tiktoken/encoders/cl100k_base.json' assert { type: 'json' };
export function tokenCount(content: string): number {
const encoding = new Tiktoken(

View File

@@ -5,8 +5,8 @@
"module": "ESNext",
// "rootDir": "./src",
"resolveJsonModule": true,
"moduleResolution": "node",
"resolveJsonModule": true,
"allowJs": true,