Compare commits

..

1 Commit

Author: di-sukharev · SHA1: cdfc461d00 · Message: 3.2.6 · Date: 2024-12-14 20:10:15 +01:00
68 changed files with 21800 additions and 30806 deletions
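For reference, the same summary can be reproduced locally; `<base-ref>` below is a placeholder, since the base of the comparison is not shown on this page:

```sh
# <base-ref> is a placeholder for the commit this page compares against
git diff --stat <base-ref>..cdfc461d00
```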

Binary file not shown (size before: 2.8 KiB, after: 4.7 KiB).

.github/logo-black.png vendored · binary file not shown (size before: 1.7 KiB, after: 2.1 KiB).

.github/logo-grad.svg vendored · 14 lines changed

@@ -1 +1,13 @@
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)" d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z"/><defs><linearGradient id="paint0_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient><linearGradient id="paint1_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient></defs></svg>
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z" fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)"/>
<defs>
<linearGradient id="paint0_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
<stop stop-color="#D33075"/>
<stop offset="1" stop-color="#6157D8"/>
</linearGradient>
<linearGradient id="paint1_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
<stop stop-color="#D33075"/>
<stop offset="1" stop-color="#6157D8"/>
</linearGradient>
</defs>
</svg>

Image size: 3.2 KiB before, 3.2 KiB after.

.github/logo.svg vendored · 5 lines changed

@@ -1 +1,4 @@
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="#000" d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z"/><path fill="#000" d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z"/></svg>
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z" fill="black"/>
<path d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z" fill="black"/>
</svg>

Image size: 1.7 KiB before, 1.7 KiB after.

Binary file not shown (size before: 237 KiB, after: 304 KiB).


@@ -40,11 +40,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -71,6 +71,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"


@@ -15,6 +15,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: 'Dependency Review'
uses: actions/dependency-review-action@v3
uses: actions/dependency-review-action@v2


@@ -1,11 +1,6 @@
name: Testing
on:
pull_request:
push:
branches:
- master
- main
on: [pull_request]
jobs:
unit-test:
@@ -14,12 +9,11 @@ jobs:
matrix:
node-version: [20.x]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Install dependencies
run: npm install
- name: Run Unit Tests
@@ -30,12 +24,11 @@ jobs:
matrix:
node-version: [20.x]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
- name: Install git
run: |
sudo apt-get update
@@ -51,21 +44,3 @@ jobs:
run: npm run build
- name: Run E2E Tests
run: npm run test:e2e
prettier:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v4
with:
node-version: '20.x'
cache: 'npm'
- name: Install dependencies
run: npm ci
- name: Run Prettier
run: npm run format:check
- name: Prettier Output
if: failure()
run: |
echo "Prettier check failed. Please run 'npm run format' to fix formatting issues."
exit 1

.gitignore vendored · 3 lines changed

@@ -11,5 +11,4 @@ uncaughtExceptions.log
src/*.json
.idea
test.ts
notes.md
.nvmrc
notes.md


@@ -74,22 +74,6 @@ oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
where 192.168.1.10 is an example of an endpoint URL where you have Ollama set up.
#### Troubleshooting Ollama IPv6/IPv4 Connection Fix
If you encounter issues with Ollama, such as the error
```sh
✖ local model issues. details: connect ECONNREFUSED ::1:11434
```
It's likely because Ollama is not listening on IPv6 by default. To fix this, you can set the OLLAMA_HOST environment variable to 0.0.0.0 before starting Ollama:
```bash
export OLLAMA_HOST=0.0.0.0
```
This will make Ollama listen on all interfaces, including IPv6 and IPv4, resolving the connection issue. You can add this line to your shell configuration file (like `.bashrc` or `.zshrc`) to make it persistent across sessions.
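As a minimal sketch of making the fix persistent (the `~/.zshrc` path is an assumption; adjust for your shell):

```sh
# Assumes zsh: append the variable to the shell config, reload it, and restart Ollama
echo 'export OLLAMA_HOST=0.0.0.0' >> ~/.zshrc
source ~/.zshrc
ollama serve
```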
### Flags
There are multiple optional flags that can be used with the `oco` command:
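As an illustration of the syntax, using the flag names defined in `src/cli.ts` later in this diff (the exact switches cleye generates for them are an assumption):

```sh
# --fgm enables the full GitMoji specification; -c passes extra context to the prompt
oco --fgm
oco -c "focus on the API changes and mention the ticket number"
```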
@@ -122,15 +106,14 @@ Create a `.env` file and add OpenCommit config variables there like this:
```env
...
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi>
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise>
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
OCO_API_URL=<may be used to set proxy path to OpenAI api>
OCO_API_CUSTOM_HEADERS=<JSON string of custom HTTP headers to include in API requests>
OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
OCO_EMOJI=<boolean, add GitMoji>
OCO_MODEL=<either 'gpt-4o-mini' (default), 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
@@ -149,18 +132,6 @@ Simply set any of the variables above like this:
oco config set OCO_MODEL=gpt-4o-mini
```
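Several parameters can be set in a single call, since `oco config set` accepts multiple KEY=VALUE pairs; the values below are placeholders:

```sh
# Each space-separated KEY=VALUE pair is applied in turn; values are placeholders
oco config set OCO_EMOJI=true OCO_DESCRIPTION=true
oco config set OCO_LANGUAGE=en OCO_ONE_LINE_COMMIT=true
```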
To see all available configuration parameters and their accepted values:
```sh
oco config describe
```
To see details for a specific parameter:
```sh
oco config describe OCO_MODEL
```
Configure [GitMoji](https://gitmoji.dev/) to preface a message.
```sh


@@ -9,33 +9,19 @@ const config: Config = {
testTimeout: 100_000,
coverageProvider: 'v8',
moduleDirectories: ['node_modules', 'src'],
preset: 'ts-jest/presets/default-esm',
preset: 'ts-jest/presets/js-with-ts-esm',
setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
testEnvironment: 'node',
testRegex: ['.*\\.test\\.ts$'],
// Tell Jest to ignore the specific duplicate package.json files
// that are causing Haste module naming collisions
modulePathIgnorePatterns: [
'<rootDir>/test/e2e/prompt-module/data/'
],
transformIgnorePatterns: [
'node_modules/(?!(cli-testing-library|@clack|cleye)/.*)'
],
transformIgnorePatterns: ['node_modules/(?!cli-testing-library)'],
transform: {
'^.+\\.(ts|tsx|js|jsx|mjs)$': [
'^.+\\.(ts|tsx)$': [
'ts-jest',
{
diagnostics: false,
useESM: true,
tsconfig: {
module: 'ESNext',
target: 'ES2022'
}
useESM: true
}
]
},
moduleNameMapper: {
'^(\\.{1,2}/.*)\\.js$': '$1'
}
};

out/cli.cjs · 18696 lines changed · diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

package-lock.json generated · 4319 lines changed · diff suppressed because it is too large


@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.2.10",
"version": "3.2.6",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -44,14 +44,13 @@
"ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
"dev": "ts-node ./src/cli.ts",
"dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
"build": "npx rimraf out && node esbuild.config.js",
"build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
"deploy": "npm publish --tag latest",
"deploy:build": "npm run build:push && git push --tags && npm run deploy",
"deploy:patch": "npm version patch && npm run deploy:build",
"lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src",
"format:check": "prettier --check src",
"test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
"test:all": "npm run test:unit:docker && npm run test:e2e:docker",
"test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
@@ -68,15 +67,14 @@
"@types/inquirer": "^9.0.3",
"@types/jest": "^29.5.12",
"@types/node": "^16.18.14",
"@typescript-eslint/eslint-plugin": "^8.29.0",
"@typescript-eslint/parser": "^8.29.0",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"cli-testing-library": "^2.0.2",
"dotenv": "^16.0.3",
"esbuild": "^0.25.5",
"eslint": "^9.24.0",
"esbuild": "^0.15.18",
"eslint": "^8.28.0",
"jest": "^29.7.0",
"prettier": "^2.8.4",
"rimraf": "^6.0.1",
"ts-jest": "^29.1.2",
"ts-node": "^10.9.1",
"typescript": "^4.9.3"
@@ -84,7 +82,7 @@
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.1",
"@actions/github": "^5.1.1",
"@anthropic-ai/sdk": "^0.19.2",
"@azure/openai": "^1.0.0-beta.12",
"@clack/prompts": "^0.6.1",
@@ -104,9 +102,5 @@
"openai": "^4.57.0",
"punycode": "^2.3.1",
"zod": "^3.23.8"
},
"overrides": {
"ajv": "^8.17.1",
"whatwg-url": "^14.0.0"
}
}


@@ -19,11 +19,7 @@ cli(
name: 'opencommit',
commands: [configCommand, hookCommand, commitlintConfigCommand],
flags: {
fgm: {
type: Boolean,
description: 'Use full GitMoji specification',
default: false
},
fgm: Boolean,
context: {
type: String,
alias: 'c',


@@ -1,5 +1,4 @@
import {
text,
confirm,
intro,
isCancel,
@@ -86,29 +85,15 @@ ${commitMessage}
${chalk.grey('——————————————————')}`
);
const userAction = skipCommitConfirmation
? 'Yes'
: await select({
message: 'Confirm the commit message?',
options: [
{ value: 'Yes', label: 'Yes' },
{ value: 'No', label: 'No' },
{ value: 'Edit', label: 'Edit' }
]
});
const isCommitConfirmedByUser =
skipCommitConfirmation ||
(await confirm({
message: 'Confirm the commit message?'
}));
if (isCancel(userAction)) process.exit(1);
if (isCancel(isCommitConfirmedByUser)) process.exit(1);
if (userAction === 'Edit') {
const textResponse = await text({
message: 'Please edit the commit message: (press Enter to continue)',
initialValue: commitMessage
});
commitMessage = textResponse.toString();
}
if (userAction === 'Yes' || userAction === 'Edit') {
if (isCommitConfirmedByUser) {
const committingChangesSpinner = spinner();
committingChangesSpinner.start('Committing the changes');
const { stdout } = await execa('git', [
@@ -153,8 +138,7 @@ ${chalk.grey('——————————————————')}`
]);
pushSpinner.stop(
`${chalk.green('✔')} Successfully pushed all commits to ${
remotes[0]
`${chalk.green('✔')} Successfully pushed all commits to ${remotes[0]
}`
);
@@ -164,26 +148,23 @@ ${chalk.grey('——————————————————')}`
process.exit(0);
}
} else {
const skipOption = `don't push`;
const skipOption = `don't push`
const selectedRemote = (await select({
message: 'Choose a remote to push to',
options: [...remotes, skipOption].map((remote) => ({
value: remote,
label: remote
}))
options: [...remotes, skipOption].map((remote) => ({ value: remote, label: remote })),
})) as string;
if (isCancel(selectedRemote)) process.exit(1);
if (selectedRemote !== skipOption) {
const pushSpinner = spinner();
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
const { stdout } = await execa('git', ['push', selectedRemote]);
if (stdout) outro(stdout);
pushSpinner.stop(
`${chalk.green(
'✔'
@@ -254,9 +235,8 @@ export async function commit(
stagedFilesSpinner.start('Counting staged files');
if (stagedFiles.length === 0) {
if (!stagedFiles.length) {
stagedFilesSpinner.stop('No files are staged');
const isStageAllAndCommitConfirmedByUser = await confirm({
message: 'Do you want to stage all files and generate commit message?'
});
@@ -265,7 +245,7 @@ export async function commit(
if (isStageAllAndCommitConfirmedByUser) {
await commit(extraArgs, context, true, fullGitMojiSpec);
process.exit(0);
process.exit(1);
}
if (stagedFiles.length === 0 && changedFiles.length > 0) {
@@ -277,13 +257,13 @@ export async function commit(
}))
})) as string[];
if (isCancel(files)) process.exit(0);
if (isCancel(files)) process.exit(1);
await gitAdd({ files });
}
await commit(extraArgs, context, false, fullGitMojiSpec);
process.exit(0);
process.exit(1);
}
stagedFilesSpinner.stop(


@@ -25,16 +25,12 @@ export enum CONFIG_KEYS {
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
OCO_API_URL = 'OCO_API_URL',
OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT'
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
}
export enum CONFIG_MODES {
get = 'get',
set = 'set',
describe = 'describe'
set = 'set'
}
export const MODEL_LIST = {
@@ -131,444 +127,7 @@ export const MODEL_LIST = {
'pixtral-12b-latest',
'mistral-embed',
'mistral-moderation-2411',
'mistral-moderation-latest'
],
deepseek: ['deepseek-chat', 'deepseek-reasoner'],
// AI/ML API available chat-completion models
// https://api.aimlapi.com/v1/models
aimlapi: [
'openai/gpt-4o',
'gpt-4o-2024-08-06',
'gpt-4o-2024-05-13',
'gpt-4o-mini',
'gpt-4o-mini-2024-07-18',
'chatgpt-4o-latest',
'gpt-4-turbo',
'gpt-4-turbo-2024-04-09',
'gpt-4',
'gpt-4-0125-preview',
'gpt-4-1106-preview',
'gpt-3.5-turbo',
'gpt-3.5-turbo-0125',
'gpt-3.5-turbo-1106',
'o1-preview',
'o1-preview-2024-09-12',
'o1-mini',
'o1-mini-2024-09-12',
'o3-mini',
'gpt-4o-audio-preview',
'gpt-4o-mini-audio-preview',
'gpt-4o-search-preview',
'gpt-4o-mini-search-preview',
'openai/gpt-4.1-2025-04-14',
'openai/gpt-4.1-mini-2025-04-14',
'openai/gpt-4.1-nano-2025-04-14',
'openai/o4-mini-2025-04-16',
'openai/o3-2025-04-16',
'o1',
'openai/o3-pro',
'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
'google/gemma-2-27b-it',
'meta-llama/Llama-Vision-Free',
'Qwen/Qwen2-72B-Instruct',
'mistralai/Mixtral-8x7B-Instruct-v0.1',
'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
'meta-llama/Llama-3.3-70B-Instruct-Turbo',
'meta-llama/Llama-3.2-3B-Instruct-Turbo',
'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
'Qwen/Qwen2.5-7B-Instruct-Turbo',
'Qwen/Qwen2.5-Coder-32B-Instruct',
'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
'meta-llama/Llama-3-8b-chat-hf',
'meta-llama/Llama-3-70b-chat-hf',
'Qwen/Qwen2.5-72B-Instruct-Turbo',
'Qwen/QwQ-32B',
'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
'mistralai/Mistral-7B-Instruct-v0.2',
'meta-llama/LlamaGuard-2-8b',
'mistralai/Mistral-7B-Instruct-v0.1',
'mistralai/Mistral-7B-Instruct-v0.3',
'meta-llama/Meta-Llama-Guard-3-8B',
'meta-llama/llama-4-scout',
'meta-llama/llama-4-maverick',
'Qwen/Qwen3-235B-A22B-fp8-tput',
'claude-3-opus-20240229',
'claude-3-haiku-20240307',
'claude-3-5-sonnet-20240620',
'claude-3-5-sonnet-20241022',
'claude-3-5-haiku-20241022',
'claude-3-7-sonnet-20250219',
'claude-sonnet-4-20250514',
'claude-opus-4-20250514',
'google/gemini-2.0-flash-exp',
'google/gemini-2.0-flash',
'google/gemini-2.5-pro',
'google/gemini-2.5-flash',
'deepseek-chat',
'deepseek-reasoner',
'qwen-max',
'qwen-plus',
'qwen-turbo',
'qwen-max-2025-01-25',
'mistralai/mistral-tiny',
'mistralai/mistral-nemo',
'anthracite-org/magnum-v4-72b',
'nvidia/llama-3.1-nemotron-70b-instruct',
'cohere/command-r-plus',
'mistralai/codestral-2501',
'google/gemma-3-4b-it',
'google/gemma-3-12b-it',
'google/gemma-3-27b-it',
'google/gemini-2.5-flash-lite-preview',
'deepseek/deepseek-prover-v2',
'google/gemma-3n-e4b-it',
'cohere/command-a',
'MiniMax-Text-01',
'abab6.5s-chat',
'minimax/m1',
'bagoodex/bagoodex-search-v1',
'moonshot/kimi-k2-preview',
'perplexity/sonar',
'perplexity/sonar-pro',
'x-ai/grok-4-07-09',
'x-ai/grok-3-beta',
'x-ai/grok-3-mini-beta'
],
// OpenRouter available models
// input_modalities: 'text'
// output_modalities: 'text'
// https://openrouter.ai/api/v1/models
openrouter: [
'openai/gpt-4o-mini', // used by default
'01-ai/yi-large',
'aetherwiing/mn-starcannon-12b',
'agentica-org/deepcoder-14b-preview:free',
'ai21/jamba-1.6-large',
'ai21/jamba-1.6-mini',
'aion-labs/aion-1.0',
'aion-labs/aion-1.0-mini',
'aion-labs/aion-rp-llama-3.1-8b',
'alfredpros/codellama-7b-instruct-solidity',
'all-hands/openhands-lm-32b-v0.1',
'alpindale/goliath-120b',
'alpindale/magnum-72b',
'amazon/nova-lite-v1',
'amazon/nova-micro-v1',
'amazon/nova-pro-v1',
'anthracite-org/magnum-v2-72b',
'anthracite-org/magnum-v4-72b',
'anthropic/claude-2',
'anthropic/claude-2.0',
'anthropic/claude-2.0:beta',
'anthropic/claude-2.1',
'anthropic/claude-2.1:beta',
'anthropic/claude-2:beta',
'anthropic/claude-3-haiku',
'anthropic/claude-3-haiku:beta',
'anthropic/claude-3-opus',
'anthropic/claude-3-opus:beta',
'anthropic/claude-3-sonnet',
'anthropic/claude-3-sonnet:beta',
'anthropic/claude-3.5-haiku',
'anthropic/claude-3.5-haiku-20241022',
'anthropic/claude-3.5-haiku-20241022:beta',
'anthropic/claude-3.5-haiku:beta',
'anthropic/claude-3.5-sonnet',
'anthropic/claude-3.5-sonnet-20240620',
'anthropic/claude-3.5-sonnet-20240620:beta',
'anthropic/claude-3.5-sonnet:beta',
'anthropic/claude-3.7-sonnet',
'anthropic/claude-3.7-sonnet:beta',
'anthropic/claude-3.7-sonnet:thinking',
'anthropic/claude-opus-4',
'anthropic/claude-sonnet-4',
'arcee-ai/arcee-blitz',
'arcee-ai/caller-large',
'arcee-ai/coder-large',
'arcee-ai/maestro-reasoning',
'arcee-ai/spotlight',
'arcee-ai/virtuoso-large',
'arcee-ai/virtuoso-medium-v2',
'arliai/qwq-32b-arliai-rpr-v1:free',
'cognitivecomputations/dolphin-mixtral-8x22b',
'cognitivecomputations/dolphin3.0-mistral-24b:free',
'cognitivecomputations/dolphin3.0-r1-mistral-24b:free',
'cohere/command',
'cohere/command-a',
'cohere/command-r',
'cohere/command-r-03-2024',
'cohere/command-r-08-2024',
'cohere/command-r-plus',
'cohere/command-r-plus-04-2024',
'cohere/command-r-plus-08-2024',
'cohere/command-r7b-12-2024',
'deepseek/deepseek-chat',
'deepseek/deepseek-chat-v3-0324',
'deepseek/deepseek-chat-v3-0324:free',
'deepseek/deepseek-chat:free',
'deepseek/deepseek-prover-v2',
'deepseek/deepseek-prover-v2:free',
'deepseek/deepseek-r1',
'deepseek/deepseek-r1-0528',
'deepseek/deepseek-r1-0528-qwen3-8b',
'deepseek/deepseek-r1-0528-qwen3-8b:free',
'deepseek/deepseek-r1-0528:free',
'deepseek/deepseek-r1-distill-llama-70b',
'deepseek/deepseek-r1-distill-llama-70b:free',
'deepseek/deepseek-r1-distill-llama-8b',
'deepseek/deepseek-r1-distill-qwen-1.5b',
'deepseek/deepseek-r1-distill-qwen-14b',
'deepseek/deepseek-r1-distill-qwen-14b:free',
'deepseek/deepseek-r1-distill-qwen-32b',
'deepseek/deepseek-r1-distill-qwen-32b:free',
'deepseek/deepseek-r1-distill-qwen-7b',
'deepseek/deepseek-r1-zero:free',
'deepseek/deepseek-r1:free',
'deepseek/deepseek-v3-base:free',
'eleutherai/llemma_7b',
'eva-unit-01/eva-llama-3.33-70b',
'eva-unit-01/eva-qwen-2.5-32b',
'eva-unit-01/eva-qwen-2.5-72b',
'featherless/qwerky-72b:free',
'google/gemini-2.0-flash-001',
'google/gemini-2.0-flash-exp:free',
'google/gemini-2.0-flash-lite-001',
'google/gemini-2.5-flash-preview',
'google/gemini-2.5-flash-preview-05-20',
'google/gemini-2.5-flash-preview-05-20:thinking',
'google/gemini-2.5-flash-preview:thinking',
'google/gemini-2.5-pro-exp-03-25',
'google/gemini-2.5-pro-preview',
'google/gemini-2.5-pro-preview-05-06',
'google/gemini-flash-1.5',
'google/gemini-flash-1.5-8b',
'google/gemini-pro-1.5',
'google/gemma-2-27b-it',
'google/gemma-2-9b-it',
'google/gemma-2-9b-it:free',
'google/gemma-3-12b-it',
'google/gemma-3-12b-it:free',
'google/gemma-3-1b-it:free',
'google/gemma-3-27b-it',
'google/gemma-3-27b-it:free',
'google/gemma-3-4b-it',
'google/gemma-3-4b-it:free',
'google/gemma-3n-e4b-it:free',
'gryphe/mythomax-l2-13b',
'inception/mercury-coder-small-beta',
'infermatic/mn-inferor-12b',
'inflection/inflection-3-pi',
'inflection/inflection-3-productivity',
'liquid/lfm-3b',
'liquid/lfm-40b',
'liquid/lfm-7b',
'mancer/weaver',
'meta-llama/llama-2-70b-chat',
'meta-llama/llama-3-70b-instruct',
'meta-llama/llama-3-8b-instruct',
'meta-llama/llama-3.1-405b',
'meta-llama/llama-3.1-405b-instruct',
'meta-llama/llama-3.1-405b:free',
'meta-llama/llama-3.1-70b-instruct',
'meta-llama/llama-3.1-8b-instruct',
'meta-llama/llama-3.1-8b-instruct:free',
'meta-llama/llama-3.2-11b-vision-instruct',
'meta-llama/llama-3.2-11b-vision-instruct:free',
'meta-llama/llama-3.2-1b-instruct',
'meta-llama/llama-3.2-1b-instruct:free',
'meta-llama/llama-3.2-3b-instruct',
'meta-llama/llama-3.2-3b-instruct:free',
'meta-llama/llama-3.2-90b-vision-instruct',
'meta-llama/llama-3.3-70b-instruct',
'meta-llama/llama-3.3-70b-instruct:free',
'meta-llama/llama-3.3-8b-instruct:free',
'meta-llama/llama-4-maverick',
'meta-llama/llama-4-maverick:free',
'meta-llama/llama-4-scout',
'meta-llama/llama-4-scout:free',
'meta-llama/llama-guard-2-8b',
'meta-llama/llama-guard-3-8b',
'meta-llama/llama-guard-4-12b',
'microsoft/mai-ds-r1:free',
'microsoft/phi-3-medium-128k-instruct',
'microsoft/phi-3-mini-128k-instruct',
'microsoft/phi-3.5-mini-128k-instruct',
'microsoft/phi-4',
'microsoft/phi-4-multimodal-instruct',
'microsoft/phi-4-reasoning-plus',
'microsoft/phi-4-reasoning-plus:free',
'microsoft/phi-4-reasoning:free',
'microsoft/wizardlm-2-8x22b',
'minimax/minimax-01',
'mistralai/codestral-2501',
'mistralai/devstral-small',
'mistralai/devstral-small:free',
'mistralai/magistral-medium-2506',
'mistralai/magistral-medium-2506:thinking',
'mistralai/magistral-small-2506',
'mistralai/ministral-3b',
'mistralai/ministral-8b',
'mistralai/mistral-7b-instruct',
'mistralai/mistral-7b-instruct-v0.1',
'mistralai/mistral-7b-instruct-v0.2',
'mistralai/mistral-7b-instruct-v0.3',
'mistralai/mistral-7b-instruct:free',
'mistralai/mistral-large',
'mistralai/mistral-large-2407',
'mistralai/mistral-large-2411',
'mistralai/mistral-medium',
'mistralai/mistral-medium-3',
'mistralai/mistral-nemo',
'mistralai/mistral-nemo:free',
'mistralai/mistral-saba',
'mistralai/mistral-small',
'mistralai/mistral-small-24b-instruct-2501',
'mistralai/mistral-small-24b-instruct-2501:free',
'mistralai/mistral-small-3.1-24b-instruct',
'mistralai/mistral-small-3.1-24b-instruct:free',
'mistralai/mistral-tiny',
'mistralai/mixtral-8x22b-instruct',
'mistralai/mixtral-8x7b-instruct',
'mistralai/pixtral-12b',
'mistralai/pixtral-large-2411',
'moonshotai/kimi-vl-a3b-thinking:free',
'moonshotai/moonlight-16b-a3b-instruct:free',
'neversleep/llama-3-lumimaid-70b',
'neversleep/llama-3-lumimaid-8b',
'neversleep/llama-3.1-lumimaid-70b',
'neversleep/llama-3.1-lumimaid-8b',
'neversleep/noromaid-20b',
'nothingiisreal/mn-celeste-12b',
'nousresearch/deephermes-3-llama-3-8b-preview:free',
'nousresearch/deephermes-3-mistral-24b-preview:free',
'nousresearch/hermes-2-pro-llama-3-8b',
'nousresearch/hermes-3-llama-3.1-405b',
'nousresearch/hermes-3-llama-3.1-70b',
'nousresearch/nous-hermes-2-mixtral-8x7b-dpo',
'nvidia/llama-3.1-nemotron-70b-instruct',
'nvidia/llama-3.1-nemotron-ultra-253b-v1',
'nvidia/llama-3.1-nemotron-ultra-253b-v1:free',
'nvidia/llama-3.3-nemotron-super-49b-v1',
'nvidia/llama-3.3-nemotron-super-49b-v1:free',
'open-r1/olympiccoder-32b:free',
'openai/chatgpt-4o-latest',
'openai/codex-mini',
'openai/gpt-3.5-turbo',
'openai/gpt-3.5-turbo-0125',
'openai/gpt-3.5-turbo-0613',
'openai/gpt-3.5-turbo-1106',
'openai/gpt-3.5-turbo-16k',
'openai/gpt-3.5-turbo-instruct',
'openai/gpt-4',
'openai/gpt-4-0314',
'openai/gpt-4-1106-preview',
'openai/gpt-4-turbo',
'openai/gpt-4-turbo-preview',
'openai/gpt-4.1',
'openai/gpt-4.1-mini',
'openai/gpt-4.1-nano',
'openai/gpt-4.5-preview',
'openai/gpt-4o',
'openai/gpt-4o-2024-05-13',
'openai/gpt-4o-2024-08-06',
'openai/gpt-4o-2024-11-20',
'openai/gpt-4o-mini-2024-07-18',
'openai/gpt-4o-mini-search-preview',
'openai/gpt-4o-search-preview',
'openai/gpt-4o:extended',
'openai/o1',
'openai/o1-mini',
'openai/o1-mini-2024-09-12',
'openai/o1-preview',
'openai/o1-preview-2024-09-12',
'openai/o1-pro',
'openai/o3',
'openai/o3-mini',
'openai/o3-mini-high',
'openai/o3-pro',
'openai/o4-mini',
'openai/o4-mini-high',
'opengvlab/internvl3-14b:free',
'opengvlab/internvl3-2b:free',
'openrouter/auto',
'perplexity/llama-3.1-sonar-large-128k-online',
'perplexity/llama-3.1-sonar-small-128k-online',
'perplexity/r1-1776',
'perplexity/sonar',
'perplexity/sonar-deep-research',
'perplexity/sonar-pro',
'perplexity/sonar-reasoning',
'perplexity/sonar-reasoning-pro',
'pygmalionai/mythalion-13b',
'qwen/qwen-2-72b-instruct',
'qwen/qwen-2.5-72b-instruct',
'qwen/qwen-2.5-72b-instruct:free',
'qwen/qwen-2.5-7b-instruct',
'qwen/qwen-2.5-7b-instruct:free',
'qwen/qwen-2.5-coder-32b-instruct',
'qwen/qwen-2.5-coder-32b-instruct:free',
'qwen/qwen-2.5-vl-7b-instruct',
'qwen/qwen-2.5-vl-7b-instruct:free',
'qwen/qwen-max',
'qwen/qwen-plus',
'qwen/qwen-turbo',
'qwen/qwen-vl-max',
'qwen/qwen-vl-plus',
'qwen/qwen2.5-vl-32b-instruct',
'qwen/qwen2.5-vl-32b-instruct:free',
'qwen/qwen2.5-vl-3b-instruct:free',
'qwen/qwen2.5-vl-72b-instruct',
'qwen/qwen2.5-vl-72b-instruct:free',
'qwen/qwen3-14b',
'qwen/qwen3-14b:free',
'qwen/qwen3-235b-a22b',
'qwen/qwen3-235b-a22b:free',
'qwen/qwen3-30b-a3b',
'qwen/qwen3-30b-a3b:free',
'qwen/qwen3-32b',
'qwen/qwen3-32b:free',
'qwen/qwen3-8b',
'qwen/qwen3-8b:free',
'qwen/qwq-32b',
'qwen/qwq-32b-preview',
'qwen/qwq-32b:free',
'raifle/sorcererlm-8x22b',
'rekaai/reka-flash-3:free',
'sao10k/fimbulvetr-11b-v2',
'sao10k/l3-euryale-70b',
'sao10k/l3-lunaris-8b',
'sao10k/l3.1-euryale-70b',
'sao10k/l3.3-euryale-70b',
'sarvamai/sarvam-m:free',
'scb10x/llama3.1-typhoon2-70b-instruct',
'sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b',
'shisa-ai/shisa-v2-llama3.3-70b:free',
'sophosympatheia/midnight-rose-70b',
'thedrummer/anubis-pro-105b-v1',
'thedrummer/rocinante-12b',
'thedrummer/skyfall-36b-v2',
'thedrummer/unslopnemo-12b',
'thedrummer/valkyrie-49b-v1',
'thudm/glm-4-32b',
'thudm/glm-4-32b:free',
'thudm/glm-z1-32b',
'thudm/glm-z1-32b:free',
'thudm/glm-z1-rumination-32b',
'tngtech/deepseek-r1t-chimera:free',
'undi95/remm-slerp-l2-13b',
'undi95/toppy-m-7b',
'x-ai/grok-2-1212',
'x-ai/grok-2-vision-1212',
'x-ai/grok-3-beta',
'x-ai/grok-3-mini-beta',
'x-ai/grok-beta',
'x-ai/grok-vision-beta'
'mistral-moderation-latest',
]
};
@@ -586,20 +145,14 @@ const getDefaultModel = (provider: string | undefined): string => {
return MODEL_LIST.groq[0];
case 'mistral':
return MODEL_LIST.mistral[0];
case 'deepseek':
return MODEL_LIST.deepseek[0];
case 'aimlapi':
return MODEL_LIST.aimlapi[0];
case 'openrouter':
return MODEL_LIST.openrouter[0];
default:
return MODEL_LIST.openai[0];
}
};
export enum DEFAULT_TOKEN_LIMITS {
DEFAULT_MAX_TOKENS_INPUT = 4096,
DEFAULT_MAX_TOKENS_OUTPUT = 500
DEFAULT_MAX_TOKENS_INPUT = 40960,
DEFAULT_MAX_TOKENS_OUTPUT = 4096
}
const validateConfig = (
@@ -631,7 +184,7 @@ export const configValidators = {
validateConfig(
'OCO_API_KEY',
value,
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic" or "deepseek". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
'You need to provide the OCO_API_KEY when OCO_AI_PROVIDER set to "openai" (default) or "ollama" or "mlx" or "azure" or "gemini" or "flowise" or "anthropic". Run `oco config set OCO_API_KEY=your_key OCO_AI_PROVIDER=openai`'
);
return value;
@@ -647,22 +200,6 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_API_CUSTOM_HEADERS](value) {
try {
// Custom headers must be a valid JSON string
if (typeof value === 'string') {
JSON.parse(value);
}
return value;
} catch (error) {
validateConfig(
CONFIG_KEYS.OCO_API_CUSTOM_HEADERS,
false,
'Must be a valid JSON string of headers'
);
}
},
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
value = parseInt(value);
validateConfig(
@@ -695,16 +232,6 @@ export const configValidators = {
return value;
},
[CONFIG_KEYS.OCO_OMIT_SCOPE](value: any) {
validateConfig(
CONFIG_KEYS.OCO_OMIT_SCOPE,
typeof value === 'boolean',
'Must be boolean: true or false'
);
return value;
},
[CONFIG_KEYS.OCO_LANGUAGE](value: any) {
const supportedLanguages = Object.keys(i18n);
@@ -780,12 +307,9 @@ export const configValidators = {
'azure',
'test',
'flowise',
'groq',
'deepseek',
'aimlapi',
'openrouter'
'groq'
].includes(value) || value.startsWith('ollama'),
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
);
return value;
@@ -819,14 +343,6 @@ export const configValidators = {
'Must be true or false'
);
return value;
},
[CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT](value: any) {
validateConfig(
CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT,
typeof value === 'boolean',
'Must be true or false'
);
}
};
@@ -840,10 +356,7 @@ export enum OCO_AI_PROVIDER_ENUM {
FLOWISE = 'flowise',
GROQ = 'groq',
MISTRAL = 'mistral',
MLX = 'mlx',
DEEPSEEK = 'deepseek',
AIMLAPI = 'aimlapi',
OPENROUTER = 'openrouter'
MLX = 'mlx'
}
export type ConfigType = {
@@ -851,7 +364,6 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
[CONFIG_KEYS.OCO_API_URL]?: string;
[CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
[CONFIG_KEYS.OCO_EMOJI]: boolean;
[CONFIG_KEYS.OCO_WHY]: boolean;
@@ -862,9 +374,7 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_AI_PROVIDER]: OCO_AI_PROVIDER_ENUM;
[CONFIG_KEYS.OCO_GITPUSH]: boolean;
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
[CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
[CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
};
export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
@@ -911,9 +421,7 @@ export const DEFAULT_CONFIG = {
OCO_ONE_LINE_COMMIT: false,
OCO_TEST_MOCK_TYPE: 'commit-message',
OCO_WHY: false,
OCO_OMIT_SCOPE: false,
OCO_GITPUSH: true, // todo: deprecate
OCO_HOOK_AUTO_UNCOMMENT: false
OCO_GITPUSH: true // todo: deprecate
};
const initGlobalConfig = (configPath: string = defaultConfigPath) => {
@@ -936,7 +444,6 @@ const getEnvConfig = (envPath: string) => {
OCO_MODEL: process.env.OCO_MODEL,
OCO_API_URL: process.env.OCO_API_URL,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
@@ -952,7 +459,6 @@ const getEnvConfig = (envPath: string) => {
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
OCO_OMIT_SCOPE: parseConfigVarValue(process.env.OCO_OMIT_SCOPE),
OCO_GITPUSH: parseConfigVarValue(process.env.OCO_GITPUSH) // todo: deprecate
};
@@ -1078,220 +584,28 @@ export const setConfig = (
outro(`${chalk.green('✔')} config successfully set`);
};
// --- HELP MESSAGE GENERATION ---
function getConfigKeyDetails(key) {
switch (key) {
case CONFIG_KEYS.OCO_MODEL:
return {
description: 'The AI model to use for generating commit messages',
values: MODEL_LIST
};
case CONFIG_KEYS.OCO_AI_PROVIDER:
return {
description: 'The AI provider to use',
values: Object.values(OCO_AI_PROVIDER_ENUM)
};
case CONFIG_KEYS.OCO_PROMPT_MODULE:
return {
description: 'The prompt module to use for commit message generation',
values: Object.values(OCO_PROMPT_MODULE_ENUM)
};
case CONFIG_KEYS.OCO_LANGUAGE:
return {
description: 'The locale to use for commit messages',
values: Object.keys(i18n)
};
case CONFIG_KEYS.OCO_TEST_MOCK_TYPE:
return {
description: 'The type of test mock to use',
values: ['commit-message', 'prompt-module-commitlint-config']
};
case CONFIG_KEYS.OCO_ONE_LINE_COMMIT:
return {
description: 'One line commit message',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_DESCRIPTION:
return {
description:
'Postface a message with ~3 sentences description of the changes',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_EMOJI:
return {
description: 'Preface a message with GitMoji',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_WHY:
return {
description:
'Output a short description of why the changes were done after the commit message (default: false)',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_OMIT_SCOPE:
return {
description: 'Do not include a scope in the commit message',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_GITPUSH:
return {
description:
'Push to git after commit (deprecated). If false, oco will exit after committing',
values: ['true', 'false']
};
case CONFIG_KEYS.OCO_TOKENS_MAX_INPUT:
return {
description: 'Max model token limit',
values: ['Any positive integer']
};
case CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT:
return {
description: 'Max response tokens',
values: ['Any positive integer']
};
case CONFIG_KEYS.OCO_API_KEY:
return {
description: 'API key for the selected provider',
values: ['String (required for most providers)']
};
case CONFIG_KEYS.OCO_API_URL:
return {
description:
'Custom API URL - may be used to set proxy path to OpenAI API',
values: ["URL string (must start with 'http://' or 'https://')"]
};
case CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
return {
description: 'Message template placeholder',
values: ['String (must start with $)']
};
case CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT:
return {
description: 'Automatically uncomment the commit message in the hook',
values: ['true', 'false']
};
default:
return {
description: 'String value',
values: ['Any string']
};
}
}
function printConfigKeyHelp(param) {
if (!Object.values(CONFIG_KEYS).includes(param)) {
console.log(chalk.red(`Unknown config parameter: ${param}`));
return;
}
const details = getConfigKeyDetails(param as CONFIG_KEYS);
let desc = details.description;
let defaultValue = undefined;
if (param in DEFAULT_CONFIG) {
defaultValue = DEFAULT_CONFIG[param];
}
console.log(chalk.bold(`\n${param}:`));
console.log(chalk.gray(` Description: ${desc}`));
if (defaultValue !== undefined) {
// Print booleans and numbers as-is, strings without quotes
if (typeof defaultValue === 'string') {
console.log(chalk.gray(` Default: ${defaultValue}`));
} else {
console.log(chalk.gray(` Default: ${defaultValue}`));
}
}
if (Array.isArray(details.values)) {
console.log(chalk.gray(' Accepted values:'));
details.values.forEach((value) => {
console.log(chalk.gray(` - ${value}`));
});
} else {
console.log(chalk.gray(' Accepted values by provider:'));
Object.entries(details.values).forEach(([provider, values]) => {
console.log(chalk.gray(` ${provider}:`));
(values as string[]).forEach((value) => {
console.log(chalk.gray(` - ${value}`));
});
});
}
}
function printAllConfigHelp() {
console.log(chalk.bold('Available config parameters:'));
for (const key of Object.values(CONFIG_KEYS).sort()) {
const details = getConfigKeyDetails(key);
// Try to get the default value from DEFAULT_CONFIG
let defaultValue = undefined;
if (key in DEFAULT_CONFIG) {
defaultValue = DEFAULT_CONFIG[key];
}
console.log(chalk.bold(`\n${key}:`));
console.log(chalk.gray(` Description: ${details.description}`));
if (defaultValue !== undefined) {
if (typeof defaultValue === 'string') {
console.log(chalk.gray(` Default: ${defaultValue}`));
} else {
console.log(chalk.gray(` Default: ${defaultValue}`));
}
}
}
console.log(
chalk.yellow(
'\nUse "oco config describe [PARAMETER]" to see accepted values and more details for a specific config parameter.'
)
);
}
export const configCommand = command(
{
name: COMMANDS.config,
parameters: ['<mode>', '[key=values...]'],
help: {
description: 'Configure opencommit settings',
examples: [
'Describe all config parameters: oco config describe',
'Describe a specific parameter: oco config describe OCO_MODEL',
'Get a config value: oco config get OCO_MODEL',
'Set a config value: oco config set OCO_MODEL=gpt-4'
]
}
parameters: ['<mode>', '<key=values...>']
},
async (argv) => {
try {
const { mode, keyValues } = argv._;
intro(`COMMAND: config ${mode} ${keyValues}`);
if (mode === CONFIG_MODES.describe) {
if (!keyValues || keyValues.length === 0) {
printAllConfigHelp();
} else {
for (const key of keyValues) {
printConfigKeyHelp(key);
}
}
process.exit(0);
} else if (mode === CONFIG_MODES.get) {
if (!keyValues || keyValues.length === 0) {
throw new Error('No config keys specified for get mode');
}
if (mode === CONFIG_MODES.get) {
const config = getConfig() || {};
for (const key of keyValues) {
outro(`${key}=${config[key as keyof typeof config]}`);
}
} else if (mode === CONFIG_MODES.set) {
if (!keyValues || keyValues.length === 0) {
throw new Error('No config keys specified for set mode');
}
await setConfig(
keyValues.map((keyValue) => keyValue.split('=') as [string, string])
);
} else {
throw new Error(
`Unsupported mode: ${mode}. Valid modes are: "set", "get", and "describe"`
`Unsupported mode: ${mode}. Valid modes are: "set" and "get"`
);
}
} catch (error) {


@@ -56,14 +56,10 @@ export const prepareCommitMessageHook = async (
const fileContent = await fs.readFile(messageFilePath);
const messageWithComment = `# ${commitMessage}\n\n# ---------- [OpenCommit] ---------- #\n# Remove the # above to use this generated commit message.\n# To cancel the commit, just close this window without making any changes.\n\n${fileContent.toString()}`;
const messageWithoutComment = `${commitMessage}\n\n${fileContent.toString()}`;
const message = config.OCO_HOOK_AUTO_UNCOMMENT
? messageWithoutComment
: messageWithComment;
await fs.writeFile(messageFilePath, message);
await fs.writeFile(
messageFilePath,
commitMessage + '\n' + fileContent.toString()
);
} catch (error) {
outro(`${chalk.red('✖')} ${error}`);
process.exit(1);


@@ -11,7 +11,6 @@ export interface AiEngineConfig {
maxTokensOutput: number;
maxTokensInput: number;
baseURL?: string;
customHeaders?: Record<string, string>;
}
type Client =


@@ -1,47 +0,0 @@
import OpenAI from 'openai';
import axios, { AxiosInstance } from 'axios';
import { AiEngine, AiEngineConfig } from './Engine';
interface AimlApiConfig extends AiEngineConfig {}
export class AimlApiEngine implements AiEngine {
client: AxiosInstance;
constructor(public config: AimlApiConfig) {
this.client = axios.create({
baseURL: config.baseURL || 'https://api.aimlapi.com/v1/chat/completions',
headers: {
Authorization: `Bearer ${config.apiKey}`,
'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
'X-Title': 'opencommit',
'Content-Type': 'application/json',
...config.customHeaders
}
});
}
public generateCommitMessage = async (
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | null> => {
try {
const response = await this.client.post('', {
model: this.config.model,
messages
});
const message = response.data.choices?.[0]?.message;
return message?.content ?? null;
} catch (error) {
const err = error as Error;
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const apiError = error.response.data.error;
if (apiError) throw new Error(apiError.message);
}
throw err;
}
};
}


@@ -8,7 +8,6 @@ import axios from 'axios';
import chalk from 'chalk';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -55,8 +54,8 @@ export class AnthropicEngine implements AiEngine {
const data = await this.client.messages.create(params);
const message = data?.content[0].text;
let content = message;
return removeContentTags(content, 'think');
return message;
} catch (error) {
const err = error as Error;
outro(`${chalk.red('✖')} ${err?.message || err}`);


@@ -7,7 +7,6 @@ import axios from 'axios';
import chalk from 'chalk';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -53,9 +52,7 @@ export class AzureEngine implements AiEngine {
if (message?.content === null) {
return undefined;
}
let content = message?.content;
return removeContentTags(content, 'think');
return message?.content;
} catch (error) {
outro(`${chalk.red('✖')} ${this.config.model}`);


@@ -1,61 +0,0 @@
import axios from 'axios';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { OpenAiEngine, OpenAiConfig } from './openAi';
export interface DeepseekConfig extends OpenAiConfig {}
export class DeepseekEngine extends OpenAiEngine {
constructor(config: DeepseekConfig) {
// Call OpenAIEngine constructor with forced Deepseek baseURL
super({
...config,
baseURL: 'https://api.deepseek.com/v1'
});
}
// Identical method from OpenAiEngine, re-implemented here
public generateCommitMessage = async (
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | null> => {
const params = {
model: this.config.model,
messages,
temperature: 0,
top_p: 0.1,
max_tokens: this.config.maxTokensOutput
};
try {
const REQUEST_TOKENS = messages
.map((msg) => tokenCount(msg.content as string) + 4)
.reduce((a, b) => a + b, 0);
if (
REQUEST_TOKENS >
this.config.maxTokensInput - this.config.maxTokensOutput
)
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
const completion = await this.client.chat.completions.create(params);
const message = completion.choices[0].message;
let content = message?.content;
return removeContentTags(content, 'think');
} catch (error) {
const err = error as Error;
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const openAiError = error.response.data.error;
if (openAiError) throw new Error(openAiError.message);
}
throw err;
}
};
}


@@ -1,6 +1,5 @@
import axios, { AxiosInstance } from 'axios';
import { OpenAI } from 'openai';
import { removeContentTags } from '../utils/removeContentTags';
import { AiEngine, AiEngineConfig } from './Engine';
interface FlowiseAiConfig extends AiEngineConfig {}
@@ -37,8 +36,7 @@ export class FlowiseEngine implements AiEngine {
try {
const response = await this.client.post('', payload);
const message = response.data;
let content = message?.text;
return removeContentTags(content, 'think');
return message?.text;
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error('local model issues. details: ' + message);


@@ -7,7 +7,6 @@ import {
} from '@google/generative-ai';
import axios from 'axios';
import { OpenAI } from 'openai';
import { removeContentTags } from '../utils/removeContentTags';
import { AiEngine, AiEngineConfig } from './Engine';
interface GeminiConfig extends AiEngineConfig {}
@@ -72,8 +71,7 @@ export class GeminiEngine implements AiEngine {
}
});
const content = result.response.text();
return removeContentTags(content, 'think');
return result.response.text();
} catch (error) {
const err = error as Error;
if (


@@ -7,4 +7,4 @@ export class GroqEngine extends OpenAiEngine {
config.baseURL = 'https://api.groq.com/openai/v1';
super(config);
}
}
}


@@ -1,21 +1,27 @@
import axios from 'axios';
import { Mistral } from '@mistralai/mistralai';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
import {
AssistantMessage as MistralAssistantMessage,
SystemMessage as MistralSystemMessage,
ToolMessage as MistralToolMessage,
UserMessage as MistralUserMessage
} from '@mistralai/mistralai/models/components';
// Using any for Mistral types to avoid type declaration issues
export interface MistralAiConfig extends AiEngineConfig {}
export type MistralCompletionMessageParam = Array<any>;
// Import Mistral dynamically to avoid TS errors
// eslint-disable-next-line @typescript-eslint/no-var-requires
const Mistral = require('@mistralai/mistralai').Mistral;
export type MistralCompletionMessageParam = Array<
| (MistralSystemMessage & { role: "system" })
| (MistralUserMessage & { role: "user" })
| (MistralAssistantMessage & { role: "assistant" })
| (MistralToolMessage & { role: "tool" })
>
export class MistralAiEngine implements AiEngine {
config: MistralAiConfig;
client: any; // Using any type for Mistral client to avoid TS errors
client: Mistral;
constructor(config: MistralAiConfig) {
this.config = config;
@@ -23,10 +29,7 @@ export class MistralAiEngine implements AiEngine {
if (!config.baseURL) {
this.client = new Mistral({ apiKey: config.apiKey });
} else {
this.client = new Mistral({
apiKey: config.apiKey,
serverURL: config.baseURL
});
this.client = new Mistral({ apiKey: config.apiKey, serverURL: config.baseURL });
}
}
@@ -53,15 +56,15 @@ export class MistralAiEngine implements AiEngine {
const completion = await this.client.chat.complete(params);
if (!completion.choices) throw Error('No completion choice available.');
if (!completion.choices)
throw Error('No completion choice available.')
const message = completion.choices[0].message;
if (!message || !message.content)
throw Error('No completion choice available.');
throw Error('No completion choice available.')
let content = message.content as string;
return removeContentTags(content, 'think');
return message.content as string;
} catch (error) {
const err = error as Error;
if (


@@ -1,47 +1,47 @@
import axios, { AxiosInstance } from 'axios';
import { OpenAI } from 'openai';
import { removeContentTags } from '../utils/removeContentTags';
import { AiEngine, AiEngineConfig } from './Engine';
import { chown } from 'fs';
interface MLXConfig extends AiEngineConfig {}
export class MLXEngine implements AiEngine {
config: MLXConfig;
client: AxiosInstance;
config: MLXConfig;
client: AxiosInstance;
constructor(config) {
this.config = config;
this.client = axios.create({
url: config.baseURL
? `${config.baseURL}/${config.apiKey}`
: 'http://localhost:8080/v1/chat/completions',
headers: { 'Content-Type': 'application/json' }
});
}
async generateCommitMessage(
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | undefined> {
const params = {
messages,
temperature: 0,
top_p: 0.1,
repetition_penalty: 1.5,
stream: false
};
try {
const response = await this.client.post(
this.client.getUri(this.config),
params
);
const choices = response.data.choices;
const message = choices[0].message;
let content = message?.content;
return removeContentTags(content, 'think');
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error(`MLX provider error: ${message}`);
constructor(config) {
this.config = config;
this.client = axios.create({
url: config.baseURL
? `${config.baseURL}/${config.apiKey}`
: 'http://localhost:8080/v1/chat/completions',
headers: { 'Content-Type': 'application/json' }
});
}
}
}
async generateCommitMessage(
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>):
Promise<string | undefined> {
const params = {
messages,
temperature: 0,
top_p: 0.1,
repetition_penalty: 1.5,
stream: false
};
try {
const response = await this.client.post(
this.client.getUri(this.config),
params
);
const choices = response.data.choices;
const message = choices[0].message;
return message?.content;
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error(`MLX provider error: ${message}`);
}
}
}

View File

@@ -1,6 +1,5 @@
import axios, { AxiosInstance } from 'axios';
import { OpenAI } from 'openai';
import { removeContentTags } from '../utils/removeContentTags';
import { AiEngine, AiEngineConfig } from './Engine';
interface OllamaConfig extends AiEngineConfig {}
@@ -11,18 +10,11 @@ export class OllamaEngine implements AiEngine {
constructor(config) {
this.config = config;
// Combine base headers with custom headers
const headers = {
'Content-Type': 'application/json',
...config.customHeaders
};
this.client = axios.create({
url: config.baseURL
? `${config.baseURL}/${config.apiKey}`
: 'http://localhost:11434/api/chat',
headers
headers: { 'Content-Type': 'application/json' }
});
}
@@ -41,9 +33,9 @@ export class OllamaEngine implements AiEngine {
params
);
const { message } = response.data;
let content = message?.content;
return removeContentTags(content, 'think');
const message = response.data.message;
return message?.content;
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error(`Ollama provider error: ${message}`);

View File

@@ -1,8 +1,6 @@
import axios from 'axios';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { parseCustomHeaders } from '../utils/engine';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -15,22 +13,11 @@ export class OpenAiEngine implements AiEngine {
constructor(config: OpenAiConfig) {
this.config = config;
const clientOptions: OpenAI.ClientOptions = {
apiKey: config.apiKey
};
if (config.baseURL) {
clientOptions.baseURL = config.baseURL;
if (!config.baseURL) {
this.client = new OpenAI({ apiKey: config.apiKey });
} else {
this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
}
if (config.customHeaders) {
const headers = parseCustomHeaders(config.customHeaders);
if (Object.keys(headers).length > 0) {
clientOptions.defaultHeaders = headers;
}
}
this.client = new OpenAI(clientOptions);
}
public generateCommitMessage = async (
@@ -58,8 +45,8 @@ export class OpenAiEngine implements AiEngine {
const completion = await this.client.chat.completions.create(params);
const message = completion.choices[0].message;
let content = message?.content;
return removeContentTags(content, 'think');
return message?.content;
} catch (error) {
const err = error as Error;
if (

View File

@@ -1,49 +0,0 @@
import OpenAI from 'openai';
import { AiEngine, AiEngineConfig } from './Engine';
import axios, { AxiosInstance } from 'axios';
import { removeContentTags } from '../utils/removeContentTags';
interface OpenRouterConfig extends AiEngineConfig {}
export class OpenRouterEngine implements AiEngine {
client: AxiosInstance;
constructor(public config: OpenRouterConfig) {
this.client = axios.create({
baseURL: 'https://openrouter.ai/api/v1/chat/completions',
headers: {
Authorization: `Bearer ${config.apiKey}`,
'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
'X-Title': 'OpenCommit',
'Content-Type': 'application/json'
}
});
}
public generateCommitMessage = async (
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
): Promise<string | null> => {
try {
const response = await this.client.post('', {
model: this.config.model,
messages
});
const message = response.data.choices[0].message;
let content = message?.content;
return removeContentTags(content, 'think');
} catch (error) {
const err = error as Error;
if (
axios.isAxiosError<{ error?: { message: string } }>(error) &&
error.response?.status === 401
) {
const openRouterError = error.response.data.error;
if (openRouterError) throw new Error(openRouterError.message);
}
throw err;
}
};
}
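
For orientation, a minimal sketch of how the OpenRouter engine removed above could be driven. The API key, model id, and token limits are placeholders (not values from this repository), and the config literal is cast loosely because the full `AiEngineConfig` shape is not shown in this diff.

```ts
import { OpenRouterEngine } from '../engine/openrouter';

async function demo() {
  // Placeholder config; real values normally come from the oco config store.
  const engine = new OpenRouterEngine({
    apiKey: 'sk-or-placeholder',
    model: 'openai/gpt-4o-mini',
    maxTokensInput: 4096,
    maxTokensOutput: 500
  } as any);

  // Posts the chat messages to openrouter.ai/api/v1/chat/completions and
  // returns the first choice's content with <think> blocks stripped.
  const message = await engine.generateCommitMessage([
    { role: 'user', content: 'diff --git a/src/server.ts b/src/server.ts ...' }
  ]);
  console.log(message);
}

demo();
```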

View File

@@ -14,10 +14,7 @@ const generateCommitMessageChatCompletionPrompt = async (
fullGitMojiSpec: boolean,
context: string
): Promise<Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>> => {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
fullGitMojiSpec,
context
);
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec, context);
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
@@ -41,7 +38,7 @@ const ADJUSTMENT_FACTOR = 20;
export const generateCommitMessageByDiff = async (
diff: string,
fullGitMojiSpec: boolean = false,
context: string = ''
context: string = ""
): Promise<string> => {
try {
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
@@ -78,7 +75,7 @@ export const generateCommitMessageByDiff = async (
const messages = await generateCommitMessageChatCompletionPrompt(
diff,
fullGitMojiSpec,
context
context,
);
const engine = getEngine();

View File

@@ -2,7 +2,5 @@
"localLanguage": "česky",
"commitFix": "fix(server.ts): zlepšení velikosti proměnné port na velká písmena PORT",
"commitFeat": "feat(server.ts): přidání podpory pro proměnnou prostředí process.env.PORT",
"commitDescription": "Proměnná port se nyní jmenuje PORT, což odpovídá konvenci pojmenování, protože PORT je konstanta. Podpora proměnné prostředí process.env.PORT umožňuje snadnější správu nastavení při spuštění.",
"commitFixOmitScope": "fix: zlepšení velikosti proměnné port na velká písmena PORT",
"commitFeatOmitScope": "feat: přidání podpory pro proměnnou prostředí process.env.PORT"
"commitDescription": "Proměnná port se nyní jmenuje PORT, což odpovídá konvenci pojmenování, protože PORT je konstanta. Podpora proměnné prostředí process.env.PORT umožňuje snadnější správu nastavení při spuštění."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "Deutsch",
"commitFix": "fix(server.ts): Ändere die Groß- und Kleinschreibung der Port-Variable von Kleinbuchstaben auf Großbuchstaben PORT.",
"commitFeat": "Funktion(server.ts): Unterstützung für die Umgebungsvariable process.env.PORT hinzufügen",
"commitDescription": "Die Port-Variable heißt jetzt PORT, was die Konsistenz mit den Namenskonventionen verbessert, da PORT eine Konstante ist. Die Unterstützung für eine Umgebungsvariable ermöglicht es der Anwendung, flexibler zu sein, da sie jetzt auf jedem verfügbaren Port laufen kann, der über die Umgebungsvariable process.env.PORT angegeben wird.",
"commitFixOmitScope": "fix: Ändere die Groß- und Kleinschreibung der Port-Variable von Kleinbuchstaben auf Großbuchstaben PORT.",
"commitFeatOmitScope": "Funktion: Unterstützung für die Umgebungsvariable process.env.PORT hinzufügen"
"commitDescription": "Die Port-Variable heißt jetzt PORT, was die Konsistenz mit den Namenskonventionen verbessert, da PORT eine Konstante ist. Die Unterstützung für eine Umgebungsvariable ermöglicht es der Anwendung, flexibler zu sein, da sie jetzt auf jedem verfügbaren Port laufen kann, der über die Umgebungsvariable process.env.PORT angegeben wird."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "english",
"commitFix": "fix(server.ts): change port variable case from lowercase port to uppercase PORT to improve semantics",
"commitFeat": "feat(server.ts): add support for process.env.PORT environment variable to be able to run app on a configurable port",
"commitDescription": "The port variable is now named PORT, which improves consistency with the naming conventions as PORT is a constant. Support for an environment variable allows the application to be more flexible as it can now run on any available port specified via the process.env.PORT environment variable.",
"commitFixOmitScope": "fix: change port variable case from lowercase port to uppercase PORT to improve semantics",
"commitFeatOmitScope": "feat: add support for process.env.PORT environment variable to be able to run app on a configurable port"
"commitDescription": "The port variable is now named PORT, which improves consistency with the naming conventions as PORT is a constant. Support for an environment variable allows the application to be more flexible as it can now run on any available port specified via the process.env.PORT environment variable."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "spanish",
"commitFix": "fix(server.ts): cambiar la variable port de minúsculas a mayúsculas PORT",
"commitFeat": "feat(server.ts): añadir soporte para la variable de entorno process.env.PORT",
"commitDescription": "La variable port ahora se llama PORT, lo que mejora la coherencia con las convenciones de nomenclatura, ya que PORT es una constante. El soporte para una variable de entorno permite que la aplicación sea más flexible, ya que ahora puede ejecutarse en cualquier puerto disponible especificado a través de la variable de entorno process.env.PORT.",
"commitFixOmitScope": "fix: cambiar la variable port de minúsculas a mayúsculas PORT",
"commitFeatOmitScope": "feat: añadir soporte para la variable de entorno process.env.PORT"
"commitDescription": "La variable port ahora se llama PORT, lo que mejora la coherencia con las convenciones de nomenclatura, ya que PORT es una constante. El soporte para una variable de entorno permite que la aplicación sea más flexible, ya que ahora puede ejecutarse en cualquier puerto disponible especificado a través de la variable de entorno process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "française",
"commitFix": "corriger(server.ts) : changer la casse de la variable de port de minuscules à majuscules (PORT)",
"commitFeat": "fonctionnalité(server.ts) : ajouter la prise en charge de la variable d'environnement process.env.PORT",
"commitDescription": "La variable de port est maintenant nommée PORT, ce qui améliore la cohérence avec les conventions de nommage car PORT est une constante. La prise en charge d'une variable d'environnement permet à l'application d'être plus flexible car elle peut maintenant s'exécuter sur n'importe quel port disponible spécifié via la variable d'environnement process.env.PORT.",
"commitFixOmitScope": "corriger : changer la casse de la variable de port de minuscules à majuscules (PORT)",
"commitFeatOmitScope": "fonctionnalité : ajouter la prise en charge de la variable d'environnement process.env.PORT"
"commitDescription": "La variable de port est maintenant nommée PORT, ce qui améliore la cohérence avec les conventions de nommage car PORT est une constante. La prise en charge d'une variable d'environnement permet à l'application d'être plus flexible car elle peut maintenant s'exécuter sur n'importe quel port disponible spécifié via la variable d'environnement process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "bahasa",
"commitFix": "fix(server.ts): mengubah huruf port variable dari huruf kecil ke huruf besar PORT",
"commitFeat": "feat(server.ts): menambahkan support di process.env.PORT environment variabel",
"commitDescription": "Port variabel bernama PORT, yang membantu konsistensi dengan memberi nama yaitu PORT yang konstan. Bantuan environment variabel membantu aplikasi lebih fleksibel, dan dapat di jalankan di port manapun yang tertulis pada process.env.PORT",
"commitFixOmitScope": "fix: mengubah huruf port variable dari huruf kecil ke huruf besar PORT",
"commitFeatOmitScope": "feat: menambahkan support di process.env.PORT environment variabel"
"commitDescription": "Port variabel bernama PORT, yang membantu konsistensi dengan memberi nama yaitu PORT yang konstan. Bantuan environment variabel membantu aplikasi lebih fleksibel, dan dapat di jalankan di port manapun yang tertulis pada process.env.PORT"
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "italiano",
"commitFix": "fix(server.ts): cambia la grafia della variabile della porta dal minuscolo port al maiuscolo PORT",
"commitFeat": "feat(server.ts): aggiunge il supporto per la variabile di ambiente process.env.PORT",
"commitDescription": "La variabile port è ora chiamata PORT, migliorando la coerenza con le convenzioni di denominazione in quanto PORT è una costante. Il supporto per una variabile di ambiente consente all'applicazione di essere più flessibile poiché ora può essere eseguita su qualsiasi porta disponibile specificata tramite la variabile di ambiente process.env.PORT.",
"commitFixOmitScope": "fix: cambia la grafia della variabile della porta dal minuscolo port al maiuscolo PORT",
"commitFeatOmitScope": "feat: aggiunge il supporto per la variabile di ambiente process.env.PORT"
"commitDescription": "La variabile port è ora chiamata PORT, migliorando la coerenza con le convenzioni di denominazione in quanto PORT è una costante. Il supporto per una variabile di ambiente consente all'applicazione di essere più flessibile poiché ora può essere eseguita su qualsiasi porta disponibile specificata tramite la variabile di ambiente process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "日本語",
"commitFix": "修正(server.ts): ポート変数を小文字のportから大文字のPORTに変更",
"commitFeat": "新機能(server.ts): 環境変数process.env.PORTのサポートを追加",
"commitDescription": "ポート変数は現在PORTという名前になり、定数であるPORTを使うことで命名規則に一貫性が生まれました。環境変数をサポートすることで、環境変数process.env.PORTで指定された任意の利用可能なポートで実行できるようになり、アプリケーションはより柔軟になりました。",
"commitFixOmitScope": "修正: ポート変数を小文字のportから大文字のPORTに変更",
"commitFeatOmitScope": "新機能: 環境変数process.env.PORTのサポートを追加"
"commitDescription": "ポート変数は現在PORTという名前になり、定数であるPORTを使うことで命名規則に一貫性が生まれました。環境変数をサポートすることで、環境変数process.env.PORTで指定された任意の利用可能なポートで実行できるようになり、アプリケーションはより柔軟になりました。"
}

View File

@@ -1,8 +1,6 @@
{
"localLanguage": "한국어",
"commitFix": "fix(server.ts): 포트 변수를 소문자 port에서 대문자 PORT로 변경",
"commitFeat": "feat(server.ts): process.env.PORT 환경 변수 지원 추가",
"commitDescription": "포트 변수는 이제 PORT로 이름이 지정되어 상수인 PORT와 일관성 있는 이름 규칙을 따릅니다. 환경 변수 지원을 통해 애플리케이션은 이제 process.env.PORT 환경 변수로 지정된 사용 가능한 모든 포트에서 실행할 수 있으므로 더 유연해졌습니다.",
"commitFixOmitScope": "fix: 포트 변수를 소문자 port에서 대문자 PORT로 변경",
"commitFeatOmitScope": "feat: process.env.PORT 환경 변수 지원 추가"
"commitFeat": "피트(server.ts): process.env.PORT 환경 변수 지원 추가",
"commitDescription": "포트 변수는 이제 PORT로 이름이 지정되어 상수인 PORT와 일관성 있는 이름 규칙을 따릅니다. 환경 변수 지원을 통해 애플리케이션은 이제 process.env.PORT 환경 변수로 지정된 사용 가능한 모든 포트에서 실행할 수 있으므로 더 유연해졌습니다."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "Nederlands",
"commitFix": "fix(server.ts): verander poortvariabele van kleine letters poort naar hoofdletters PORT",
"commitFeat": "feat(server.ts): voeg ondersteuning toe voor process.env.PORT omgevingsvariabele",
"commitDescription": "De poortvariabele heet nu PORT, wat de consistentie met de naamgevingsconventies verbetert omdat PORT een constante is. Ondersteuning voor een omgevingsvariabele maakt de applicatie flexibeler, omdat deze nu kan draaien op elke beschikbare poort die is gespecificeerd via de process.env.PORT omgevingsvariabele.",
"commitFixOmitScope": "fix: verander poortvariabele van kleine letters poort naar hoofdletters PORT",
"commitFeatOmitScope": "feat: voeg ondersteuning toe voor process.env.PORT omgevingsvariabele"
"commitDescription": "De poortvariabele heet nu PORT, wat de consistentie met de naamgevingsconventies verbetert omdat PORT een constante is. Ondersteuning voor een omgevingsvariabele maakt de applicatie flexibeler, omdat deze nu kan draaien op elke beschikbare poort die is gespecificeerd via de process.env.PORT omgevingsvariabele."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "polski",
"commitFix": "fix(server.ts): poprawa wielkości zmiennej port na pisane z dużymi literami PORT",
"commitFeat": "feat(server.ts): dodanie obsługi zmiennej środowiskowej process.env.PORT",
"commitDescription": "Zmienna port jest teraz nazwana PORT, co jest zgodne z konwencją nazewniczą ponieważ PORT jest stałą. Obsługa zmiennej środowiskowej process.env.PORT pozwala łatwiej zarządzać ustawieniami przy starcie.",
"commitFixOmitScope": "fix: poprawa wielkości zmiennej port na pisane z dużymi literami PORT",
"commitFeatOmitScope": "feat: dodanie obsługi zmiennej środowiskowej process.env.PORT"
"commitDescription": "Zmienna port jest teraz nazwana PORT, co jest zgodne z konwencją nazewniczą ponieważ PORT jest stałą. Obsługa zmiennej środowiskowej process.env.PORT pozwala łatwiej zarządzać ustawieniami przy starcie."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "português",
"commitFix": "fix(server.ts): altera o caso da variável de porta de port minúscula para PORT maiúscula",
"commitFeat": "feat(server.ts): adiciona suporte para a variável de ambiente process.env.PORT",
"commitDescription": "A variável de porta agora é denominada PORT, o que melhora a consistência com as convenções de nomenclatura, pois PORT é uma constante. O suporte para uma variável de ambiente permite que o aplicativo seja mais flexível, pois agora pode ser executado em qualquer porta disponível especificada por meio da variável de ambiente process.env.PORT.",
"commitFixOmitScope": "fix: altera o caso da variável de porta de port minúscula para PORT maiúscula",
"commitFeatOmitScope": "feat: adiciona suporte para a variável de ambiente process.env.PORT"
"commitDescription": "A variável de porta agora é denominada PORT, o que melhora a consistência com as convenções de nomenclatura, pois PORT é uma constante. O suporte para uma variável de ambiente permite que o aplicativo seja mais flexível, pois agora pode ser executado em qualquer porta disponível especificada por meio da variável de ambiente process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "русский",
"commitFix": "fix(server.ts): изменение регистра переменной порта с нижнего регистра port на верхний регистр PORT",
"commitFeat": "feat(server.ts): добавлена поддержка переменной окружения process.env.PORT",
"commitDescription": "Переменная port теперь называется PORT, что улучшает согласованность с соглашениями об именовании констант. Поддержка переменной окружения позволяет приложению быть более гибким, запускаясь на любом доступном порту, указанном с помощью переменной окружения process.env.PORT.",
"commitFixOmitScope": "fix: изменение регистра переменной порта с нижнего регистра port на верхний регистр PORT",
"commitFeatOmitScope": "feat: добавлена поддержка переменной окружения process.env.PORT"
"commitDescription": "Переменная port теперь называется PORT, что улучшает согласованность с соглашениями об именовании констант. Поддержка переменной окружения позволяет приложению быть более гибким, запускаясь на любом доступном порту, указанном с помощью переменной окружения process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "svenska",
"commitFix": "fixa(server.ts): ändra variabelnamnet för port från små bokstäver till stora bokstäver PORT",
"commitFeat": "nyhet(server.ts): lägg till stöd för process.env.PORT miljövariabel",
"commitDescription": "Variabeln som innehåller portnumret heter nu PORT vilket förbättrar konsekvensen med namngivningskonventionerna eftersom PORT är en konstant. Stöd för en miljövariabel gör att applikationen kan vara mer flexibel då den nu kan köras på vilken port som helst som specificeras via miljövariabeln process.env.PORT.",
"commitFixOmitScope": "fixa: ändra variabelnamnet för port från små bokstäver till stora bokstäver PORT",
"commitFeatOmitScope": "nyhet: lägg till stöd för process.env.PORT miljövariabel"
"commitDescription": "Variabeln som innehåller portnumret heter nu PORT vilket förbättrar konsekvensen med namngivningskonventionerna eftersom PORT är en konstant. Stöd för en miljövariabel gör att applikationen kan vara mer flexibel då den nu kan köras på vilken port som helst som specificeras via miljövariabeln process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "ไทย",
"commitFix": "fix(server.ts): เปลี่ยนตัวพิมพ์ของตัวแปร จากตัวพิมพ์เล็ก port เป็นตัวพิมพ์ใหญ่ PORT",
"commitFeat": "feat(server.ts): เพิ่มการรองรับสำหรับตัวแปรสภาพแวดล้อม process.env.PORT",
"commitDescription": "ตอนนี้ตัวแปรพอร์ตมีชื่อว่า PORT, ซึ่งปรับปรุงความสอดคล้องกับหลักการตั้งชื่อเนื่องจาก PORT เป็นค่าคงที่. การสนับสนุนสำหรับตัวแปรสภาพแวดล้อม ช่วยให้แอปพลิเคชันมีความยืดหยุ่นมากขึ้นเนื่องจาก สามารถทำงานบนพอร์ตใด ๆ ตามที่กำหนด ซึ่งระบุผ่านตัวแปรสภาพแวดล้อม process.env.PORT",
"commitFixOmitScope": "fix: เปลี่ยนตัวพิมพ์ของตัวแปร จากตัวพิมพ์เล็ก port เป็นตัวพิมพ์ใหญ่ PORT",
"commitFeatOmitScope": "feat: เพิ่มการรองรับสำหรับตัวแปรสภาพแวดล้อม process.env.PORT"
"commitDescription": "ตอนนี้ตัวแปรพอร์ตมีชื่อว่า PORT, ซึ่งปรับปรุงความสอดคล้องกับหลักการตั้งชื่อเนื่องจาก PORT เป็นค่าคงที่. การสนับสนุนสำหรับตัวแปรสภาพแวดล้อม ช่วยให้แอปพลิเคชันมีความยืดหยุ่นมากขึ้นเนื่องจาก สามารถทำงานบนพอร์ตใด ๆ ตามที่กำหนด ซึ่งระบุผ่านตัวแปรสภาพแวดล้อม process.env.PORT"
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "Turkish",
"commitFix": "fix(server.ts): port değişkeni küçük harfli porttan büyük harfli PORT'a değiştirildi",
"commitFeat": "feat(server.ts): process.env.PORT ortam değişkeni için destek eklendi.",
"commitDescription": "Bağlantı noktası değişkeni artık PORT olarak adlandırıldı ve PORT bir sabit değişken olduğu için bu adlandırma tutarlılığı artırır. Ortam değişkeni desteği, artık process.env.PORT ortam değişkeni aracılığıyla belirtilen herhangi bir kullanılabilir bağlantı noktasında çalışabileceğinden uygulamanın daha esnek olmasını sağlar.",
"commitFixOmitScope": "fix: port değişkeni küçük harfli porttan büyük harfli PORT'a değiştirildi",
"commitFeatOmitScope": "feat: process.env.PORT ortam değişkeni için destek eklendi."
"commitDescription": "Bağlantı noktası değişkeni artık PORT olarak adlandırıldı ve PORT bir sabit değişken olduğu için bu adlandırma tutarlılığı artırır. Ortam değişkeni desteği, artık process.env.PORT ortam değişkeni aracılığıyla belirtilen herhangi bir kullanılabilir bağlantı noktasında çalışabileceğinden uygulamanın daha esnek olmasını sağlar."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "vietnamese",
"commitFix": "fix(server.ts): thay đổi chữ viết thường của biến port thành chữ viết hoa PORT",
"commitFeat": "feat(server.ts): thêm hỗ trợ cho biến môi trường process.env.PORT",
"commitDescription": "Biến port đã được đổi tên thành PORT, giúp cải thiện tính nhất quán trong việc đặt tên theo quy ước vì PORT là một hằng số. Hỗ trợ cho biến môi trường cho phép ứng dụng linh hoạt hơn khi có thể chạy trên bất kỳ cổng nào được chỉ định thông qua biến môi trường process.env.PORT.",
"commitFixOmitScope": "fix: thay đổi chữ viết thường của biến port thành chữ viết hoa PORT",
"commitFeatOmitScope": "feat: thêm hỗ trợ cho biến môi trường process.env.PORT"
"commitDescription": "Biến port đã được đổi tên thành PORT, giúp cải thiện tính nhất quán trong việc đặt tên theo quy ước vì PORT là một hằng số. Hỗ trợ cho biến môi trường cho phép ứng dụng linh hoạt hơn khi có thể chạy trên bất kỳ cổng nào được chỉ định thông qua biến môi trường process.env.PORT."
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "简体中文",
"commitFix": "fix(server.ts)将端口变量从小写port改为大写PORT",
"commitFeat": "feat(server.ts)添加对process.env.PORT环境变量的支持",
"commitDescription": "现在端口变量被命名为PORT这提高了命名约定的一致性因为PORT是一个常量。环境变量的支持使应用程序更加灵活因为它现在可以通过process.env.PORT环境变量在任何可用端口上运行。",
"commitFixOmitScope": "fix将端口变量从小写port改为大写PORT",
"commitFeatOmitScope": "feat添加对process.env.PORT环境变量的支持"
"commitDescription": "现在端口变量被命名为PORT这提高了命名约定的一致性因为PORT是一个常量。环境变量的支持使应用程序更加灵活因为它现在可以通过process.env.PORT环境变量在任何可用端口上运行。"
}

View File

@@ -2,7 +2,5 @@
"localLanguage": "繁體中文",
"commitFix": "修正(server.ts)將端口變數從小寫端口改為大寫PORT",
"commitFeat": "功能(server.ts)新增對process.env.PORT環境變數的支援",
"commitDescription": "現在port變數已更名為PORT以符合命名慣例因為PORT是一個常量。支援環境變數可以使應用程序更靈活因為它現在可以通過process.env.PORT環境變數運行在任何可用端口上。",
"commitFixOmitScope": "修正將端口變數從小寫端口改為大寫PORT",
"commitFeatOmitScope": "功能新增對process.env.PORT環境變數的支援"
"commitDescription": "現在port變數已更名為PORT以符合命名慣例因為PORT是一個常量。支援環境變數可以使應用程序更靈活因為它現在可以通過process.env.PORT環境變數運行在任何可用端口上。"
}

View File

@@ -36,19 +36,6 @@ export const runMigrations = async () => {
const config = getConfig();
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
// skip unhandled providers in migration00
if (
[
OCO_AI_PROVIDER_ENUM.DEEPSEEK,
OCO_AI_PROVIDER_ENUM.GROQ,
OCO_AI_PROVIDER_ENUM.MISTRAL,
OCO_AI_PROVIDER_ENUM.MLX,
OCO_AI_PROVIDER_ENUM.OPENROUTER
].includes(config.OCO_AI_PROVIDER)
) {
return;
}
const completedMigrations = getCompletedMigrations();
let isMigrated = false;

View File

@@ -53,7 +53,7 @@ export const configureCommitlintIntegration = async (force = false) => {
spin.start('Generating consistency with given @commitlint rules');
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig as any);
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig);
const consistencyPrompts =
commitlintPrompts.GEN_COMMITLINT_CONSISTENCY_PROMPT(prompts);

View File

@@ -58,16 +58,16 @@ const llmReadableRules: {
caseRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be in ${
Array.isArray(value)
? `one of the following case:
? `one of the following case:
- ${value.join('\n - ')}.`
: `${value} case.`
}`,
emptyRule: (key, applicable) => `The ${key} should ${applicable} be empty.`,
enumRule: (key, applicable, value: string | Array<string>) =>
`The ${key} should ${applicable} be one of the following values:
`The ${key} should ${applicable} be one of the following values:
- ${Array.isArray(value) ? value.join('\n - ') : value}.`,
enumTypeRule: (key, applicable, value: string | Array<string>, prompt) =>
`The ${key} should ${applicable} be one of the following values:
`The ${key} should ${applicable} be one of the following values:
- ${
Array.isArray(value)
? value
@@ -204,11 +204,7 @@ export const inferPromptsFromCommitlintConfig = (
* ubiquitous language from @commitlint.
 * While gpt-4 does this on its own, gpt-3.5 can't map this by itself at the moment.
*/
const STRUCTURE_OF_COMMIT = config.OCO_OMIT_SCOPE
? `
- Header of commit is composed of type and subject: <type-of-commit>: <subject-of-commit>
- Description of commit is composed of body and footer (optional): <body-of-commit>\n<footer(s)-of-commit>`
: `
const STRUCTURE_OF_COMMIT = `
- Header of commit is composed of type, scope, subject: <type-of-commit>(<scope-of-commit>): <subject-of-commit>
- Description of commit is composed of body and footer (optional): <body-of-commit>\n<footer(s)-of-commit>`;
@@ -224,12 +220,8 @@ Here are the specific requirements and conventions that should be strictly follo
Commit Message Conventions:
- The commit message consists of three parts: Header, Body, and Footer.
- Header:
- Format: ${
config.OCO_OMIT_SCOPE
? '`<type>: <subject>`'
: '`<type>(<scope>): <subject>`'
}
- Header:
- Format: \`<type>(<scope>): <subject>\`
- ${prompts.join('\n- ')}
JSON Output Format:
@@ -237,19 +229,17 @@ JSON Output Format:
\`\`\`json
{
"localLanguage": "${translation.localLanguage}",
"commitFix": "<Header of commit for bug fix with scope>",
"commitFeat": "<Header of commit for feature with scope>",
"commitFixOmitScope": "<Header of commit for bug fix without scope>",
"commitFeatOmitScope": "<Header of commit for feature without scope>",
"commitFix": "<Header of commit for bug fix>",
"commitFeat": "<Header of commit for feature>",
"commitDescription": "<Description of commit for both the bug fix and the feature>"
}
\`\`\`
- The "commitDescription" should not include the commit message's header, only the description.
- The "commitDescription" should not include the commit messages header, only the description.
- Description should not be more than 74 characters.
Additional Details:
- Changing the variable 'port' to uppercase 'PORT' is considered a bug fix.
- Allowing the server to listen on a port specified through the environment variable is considered a new feature.
- Changing the variable 'port' to uppercase 'PORT' is considered a bug fix.
- Allowing the server to listen on a port specified through the environment variable is considered a new feature.
Example Git Diff is to follow:`
},
@@ -287,11 +277,7 @@ ${
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
: ''
}
${
config.OCO_OMIT_SCOPE
? 'Do not include a scope in the commit message format. Use the format: <type>: <subject>'
: ''
}
You will strictly follow the following conventions to generate the content of the commit message:
- ${prompts.join('\n- ')}

View File

@@ -4,8 +4,7 @@ import path from 'path';
const findModulePath = (moduleName: string) => {
const searchPaths = [
path.join('node_modules', moduleName),
path.join('node_modules', '.pnpm'),
path.resolve(__dirname, '../..')
path.join('node_modules', '.pnpm')
];
for (const basePath of searchPaths) {
@@ -60,7 +59,7 @@ export const getCommitLintPWDConfig =
* ES Module (commitlint@v19.x.x. <= )
* Directory import is not supported in ES Module resolution, so import the file directly
*/
modulePath = findModulePath('@commitlint/load/lib/load.js');
modulePath = await findModulePath('@commitlint/load/lib/load.js');
load = (await import(modulePath)).default;
break;
}

View File

@@ -21,7 +21,7 @@ export const getJSONBlock = (input: string): string => {
if (jsonIndex > -1) {
input = input.slice(jsonIndex + 8);
const endJsonIndex = input.search('```');
input = input.slice(0, endJsonIndex);
input = input.slice(0, endJsonIndex);
}
return input;
};

View File

@@ -108,12 +108,7 @@ const getDescriptionInstruction = () =>
const getOneLineCommitInstruction = () =>
config.OCO_ONE_LINE_COMMIT
? 'Craft a concise, single sentence, commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in one single message.'
: '';
const getScopeInstruction = () =>
config.OCO_OMIT_SCOPE
? 'Do not include a scope in the commit message format. Use the format: <type>: <subject>'
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
: '';
/**
@@ -146,18 +141,17 @@ const INIT_MAIN_PROMPT = (
const conventionGuidelines = getCommitConvention(fullGitMojiSpec);
const descriptionGuideline = getDescriptionInstruction();
const oneLineCommitGuideline = getOneLineCommitInstruction();
const scopeInstruction = getScopeInstruction();
const generalGuidelines = `Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`;
const userInputContext = userInputCodeContext(context);
return `${missionStatement}\n${diffInstruction}\n${conventionGuidelines}\n${descriptionGuideline}\n${oneLineCommitGuideline}\n${scopeInstruction}\n${generalGuidelines}\n${userInputContext}`;
return `${missionStatement}\n${diffInstruction}\n${conventionGuidelines}\n${descriptionGuideline}\n${oneLineCommitGuideline}\n${generalGuidelines}\n${userInputContext}`;
})()
});
export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessageParam =
{
role: 'user',
content: `diff --git a/src/server.ts b/src/server.ts
{
role: 'user',
content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -181,49 +175,29 @@ export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessagePara
+app.listen(process.env.PORT || PORT, () => {
+ console.log(\`Server listening on port \${PORT}\`);
});`
};
const COMMIT_TYPES = {
fix: '🐛',
feat: '✨'
} as const;
const generateCommitString = (
type: keyof typeof COMMIT_TYPES,
message: string
): string => {
const cleanMessage = removeConventionalCommitWord(message);
return config.OCO_EMOJI ? `${COMMIT_TYPES[type]} ${cleanMessage}` : message;
};
const getConsistencyContent = (translation: ConsistencyPrompt) => {
const fixMessage =
config.OCO_OMIT_SCOPE && translation.commitFixOmitScope
? translation.commitFixOmitScope
: translation.commitFix;
const getContent = (translation: ConsistencyPrompt) => {
const fix = config.OCO_EMOJI
? `🐛 ${removeConventionalCommitWord(translation.commitFix)}`
: translation.commitFix;
const featMessage =
config.OCO_OMIT_SCOPE && translation.commitFeatOmitScope
? translation.commitFeatOmitScope
: translation.commitFeat;
const fix = generateCommitString('fix', fixMessage);
const feat = config.OCO_ONE_LINE_COMMIT
? ''
: generateCommitString('feat', featMessage);
const feat = config.OCO_EMOJI
? `${removeConventionalCommitWord(translation.commitFeat)}`
: translation.commitFeat;
const description = config.OCO_DESCRIPTION
? translation.commitDescription
: '';
return [fix, feat, description].filter(Boolean).join('\n');
return `${fix}\n${feat}\n${description}`;
};
const INIT_CONSISTENCY_PROMPT = (
translation: ConsistencyPrompt
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
role: 'assistant',
content: getConsistencyContent(translation)
content: getContent(translation)
});
export const getMainCommitPrompt = async (
@@ -250,7 +224,7 @@ export const getMainCommitPrompt = async (
INIT_DIFF_PROMPT,
INIT_CONSISTENCY_PROMPT(
commitLintConfig.consistency[
translation.localLanguage
translation.localLanguage
] as ConsistencyPrompt
)
];

View File

@@ -10,45 +10,17 @@ import { MistralAiEngine } from '../engine/mistral';
import { TestAi, TestMockType } from '../engine/testAi';
import { GroqEngine } from '../engine/groq';
import { MLXEngine } from '../engine/mlx';
import { DeepseekEngine } from '../engine/deepseek';
import { AimlApiEngine } from '../engine/aimlapi';
import { OpenRouterEngine } from '../engine/openrouter';
export function parseCustomHeaders(headers: any): Record<string, string> {
let parsedHeaders = {};
if (!headers) {
return parsedHeaders;
}
try {
if (typeof headers === 'object' && !Array.isArray(headers)) {
parsedHeaders = headers;
} else {
parsedHeaders = JSON.parse(headers);
}
} catch (error) {
console.warn(
'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
);
}
return parsedHeaders;
}
export function getEngine(): AiEngine {
const config = getConfig();
const provider = config.OCO_AI_PROVIDER;
const customHeaders = parseCustomHeaders(config.OCO_API_CUSTOM_HEADERS);
const DEFAULT_CONFIG = {
model: config.OCO_MODEL!,
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
baseURL: config.OCO_API_URL!,
apiKey: config.OCO_API_KEY!,
customHeaders
apiKey: config.OCO_API_KEY!
};
switch (provider) {
@@ -79,15 +51,6 @@ export function getEngine(): AiEngine {
case OCO_AI_PROVIDER_ENUM.MLX:
return new MLXEngine(DEFAULT_CONFIG);
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
return new DeepseekEngine(DEFAULT_CONFIG);
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
return new AimlApiEngine(DEFAULT_CONFIG);
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
return new OpenRouterEngine(DEFAULT_CONFIG);
default:
return new OpenAiEngine(DEFAULT_CONFIG);
}
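
The `parseCustomHeaders` helper removed above accepts either an already-parsed object or a JSON string, and falls back to an empty object on malformed input. A small illustrative sketch (header names and values are made up; the import path assumes the helper lives in `src/utils/engine`, as the OpenAI engine's import above suggests):

```ts
import { parseCustomHeaders } from '../utils/engine';

// An object is passed through unchanged.
console.log(parseCustomHeaders({ 'X-Custom-Header': 'test-value' }));
// => { 'X-Custom-Header': 'test-value' }

// A JSON string is parsed into an object.
console.log(parseCustomHeaders('{"Authorization": "Bearer token123"}'));
// => { Authorization: 'Bearer token123' }

// Anything that fails JSON.parse logs a warning and yields {}.
console.log(parseCustomHeaders('not valid json'));
// => {}
```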

View File

@@ -1,7 +1,7 @@
import { execa } from 'execa';
import { readFileSync } from 'fs';
import ignore, { Ignore } from 'ignore';
import { join } from 'path';
import { outro, spinner } from '@clack/prompts';
export const assertGitRepo = async () => {
@@ -16,44 +16,41 @@ export const assertGitRepo = async () => {
// (file) => `:(exclude)${file}`
// );
export const getOpenCommitIgnore = async (): Promise<Ignore> => {
const gitDir = await getGitDir();
export const getOpenCommitIgnore = (): Ignore => {
const ig = ignore();
try {
ig.add(
readFileSync(join(gitDir, '.opencommitignore')).toString().split('\n')
);
ig.add(readFileSync('.opencommitignore').toString().split('\n'));
} catch (e) {}
return ig;
};
export const getCoreHooksPath = async (): Promise<string> => {
const gitDir = await getGitDir();
const { stdout } = await execa('git', ['config', 'core.hooksPath'], {
cwd: gitDir
});
const { stdout } = await execa('git', ['config', 'core.hooksPath']);
return stdout;
};
export const getStagedFiles = async (): Promise<string[]> => {
const gitDir = await getGitDir();
const { stdout: gitDir } = await execa('git', [
'rev-parse',
'--show-toplevel'
]);
const { stdout: files } = await execa(
'git',
['diff', '--name-only', '--cached', '--relative'],
{ cwd: gitDir }
);
const { stdout: files } = await execa('git', [
'diff',
'--name-only',
'--cached',
'--relative',
gitDir
]);
if (!files) return [];
const filesList = files.split('\n');
const ig = await getOpenCommitIgnore();
const ig = getOpenCommitIgnore();
const allowedFiles = filesList.filter((file) => !ig.ignores(file));
if (!allowedFiles) return [];
@@ -62,17 +59,12 @@ export const getStagedFiles = async (): Promise<string[]> => {
};
export const getChangedFiles = async (): Promise<string[]> => {
const gitDir = await getGitDir();
const { stdout: modified } = await execa('git', ['ls-files', '--modified'], {
cwd: gitDir
});
const { stdout: others } = await execa(
'git',
['ls-files', '--others', '--exclude-standard'],
{ cwd: gitDir }
);
const { stdout: modified } = await execa('git', ['ls-files', '--modified']);
const { stdout: others } = await execa('git', [
'ls-files',
'--others',
'--exclude-standard'
]);
const files = [...modified.split('\n'), ...others.split('\n')].filter(
(file) => !!file
@@ -82,20 +74,16 @@ export const getChangedFiles = async (): Promise<string[]> => {
};
export const gitAdd = async ({ files }: { files: string[] }) => {
const gitDir = await getGitDir();
const gitAddSpinner = spinner();
gitAddSpinner.start('Adding files to commit');
await execa('git', ['add', ...files], { cwd: gitDir });
await execa('git', ['add', ...files]);
gitAddSpinner.stop(`Staged ${files.length} files`);
gitAddSpinner.stop('Done');
};
export const getDiff = async ({ files }: { files: string[] }) => {
const gitDir = await getGitDir();
const lockFiles = files.filter(
(file) =>
file.includes('.lock') ||
@@ -120,20 +108,12 @@ export const getDiff = async ({ files }: { files: string[] }) => {
(file) => !file.includes('.lock') && !file.includes('-lock.')
);
const { stdout: diff } = await execa(
'git',
['diff', '--staged', '--', ...filesWithoutLocks],
{ cwd: gitDir }
);
const { stdout: diff } = await execa('git', [
'diff',
'--staged',
'--',
...filesWithoutLocks
]);
return diff;
};
export const getGitDir = async (): Promise<string> => {
const { stdout: gitDir } = await execa('git', [
'rev-parse',
'--show-toplevel'
]);
return gitDir;
};

View File

@@ -1,57 +0,0 @@
/**
* Removes content wrapped in specified tags from a string
* @param content The content string to process
* @param tag The tag name without angle brackets (e.g., 'think' for '<think></think>')
* @returns The content with the specified tags and their contents removed, and trimmed
*/
export function removeContentTags<T extends string | null | undefined>(
content: T,
tag: string
): T {
if (!content || typeof content !== 'string') {
return content;
}
// Dynamic implementation for other cases
const openTag = `<${tag}>`;
const closeTag = `</${tag}>`;
// Parse the content and remove tags
let result = '';
let skipUntil: number | null = null;
let depth = 0;
for (let i = 0; i < content.length; i++) {
// Check for opening tag
if (content.substring(i, i + openTag.length) === openTag) {
depth++;
if (depth === 1) {
skipUntil = content.indexOf(closeTag, i + openTag.length);
i = i + openTag.length - 1; // Skip the opening tag
continue;
}
}
// Check for closing tag
else if (
content.substring(i, i + closeTag.length) === closeTag &&
depth > 0
) {
depth--;
if (depth === 0) {
i = i + closeTag.length - 1; // Skip the closing tag
skipUntil = null;
continue;
}
}
// Only add character if not inside a tag
if (skipUntil === null) {
result += content[i];
}
}
// Normalize multiple spaces/tabs into a single space (preserves newlines), then trim.
result = result.replace(/[ \t]+/g, ' ').trim();
return result as unknown as T;
}
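
A quick usage sketch of this helper as defined above (the import path mirrors the test file further below; the sample strings are illustrative only):

```ts
import { removeContentTags } from '../../src/utils/removeContentTags';

// Reasoning-model output that wraps its chain of thought in <think> tags.
const raw = '<think>deciding between fix and feat</think> fix(server.ts): uppercase PORT constant';

// The tagged block is dropped and the remainder is whitespace-normalized and trimmed.
const message = removeContentTags(raw, 'think');
// => 'fix(server.ts): uppercase PORT constant'
```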

View File

@@ -125,7 +125,7 @@ describe('cli flow to push git branch', () => {
await render('git', ['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
@@ -158,7 +158,7 @@ describe('cli flow to push git branch', () => {
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);
@@ -186,7 +186,7 @@ describe('cli flow to push git branch', () => {
await render('git', ['add index.ts'], { cwd: gitDir });
const { findByText, userEvent } = await render(
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
`OCO_AI_PROVIDER='test' node`,
[resolve('./out/cli.cjs')],
{ cwd: gitDir }
);

View File

@@ -9,7 +9,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
await render('git' ,['add index.ts'], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
expect(await queryByText('No files are staged')).not.toBeInTheConsole();
expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();
@@ -34,7 +34,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async
await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });
const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
expect(await findByText('No files are staged')).toBeInTheConsole();
expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();

View File

@@ -5,8 +5,8 @@ import { prepareEnvironment, wait } from '../utils';
import path from 'path';
function getAbsolutePath(relativePath: string) {
// Use process.cwd() which should be the project root during test execution
return path.resolve(process.cwd(), 'test/e2e/prompt-module', relativePath);
const scriptDir = path.dirname(__filename);
return path.resolve(scriptDir, relativePath);
}
async function setupCommitlint(dir: string, ver: 9 | 18 | 19) {
let packagePath, packageJsonPath, configPath;
@@ -47,7 +47,7 @@ describe('cli flow to run "oco commitlint force"', () => {
`
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} commitlint force \
`,
[],
@@ -83,7 +83,7 @@ describe('cli flow to run "oco commitlint force"', () => {
`
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} commitlint force \
`,
[],
@@ -119,7 +119,7 @@ describe('cli flow to run "oco commitlint force"', () => {
`
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} commitlint force \
`,
[],
@@ -160,7 +160,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
`
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} commitlint force \
`,
[],
@@ -175,7 +175,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
`
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} commitlint get \
`,
[],
@@ -193,7 +193,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
`
OCO_TEST_MOCK_TYPE='commit-message' \
OCO_PROMPT_MODULE='@commitlint' \
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
OCO_AI_PROVIDER='test' \
node ${resolve('./out/cli.cjs')} \
`,
[],

View File

@@ -1,11 +1,10 @@
import 'cli-testing-library/extend-expect'
import { configure } from 'cli-testing-library'
import { jest } from '@jest/globals';
import 'cli-testing-library/extend-expect';
import { configure } from 'cli-testing-library';
// Make Jest available globally
global.jest = jest;
/**
* Adjusted the wait time for waitFor/findByText to 2000ms, because the default 1000ms makes the test results flaky
*/
configure({ asyncUtilTimeout: 2000 });
configure({ asyncUtilTimeout: 2000 })

View File

@@ -106,8 +106,7 @@ describe('config', () => {
envConfigFile = await generateConfig('.env', {
OCO_TOKENS_MAX_INPUT: '8192',
OCO_ONE_LINE_COMMIT: 'false',
OCO_OMIT_SCOPE: 'true'
OCO_ONE_LINE_COMMIT: 'false'
});
const config = getConfig({
@@ -120,31 +119,6 @@ describe('config', () => {
expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
expect(config.OCO_GITPUSH).toEqual(true);
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
expect(config.OCO_OMIT_SCOPE).toEqual(true);
});
it('should handle custom HTTP headers correctly', async () => {
globalConfigFile = await generateConfig('.opencommit', {
OCO_API_CUSTOM_HEADERS: '{"X-Global-Header": "global-value"}'
});
envConfigFile = await generateConfig('.env', {
OCO_API_CUSTOM_HEADERS: '{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
});
const config = getConfig({
globalPath: globalConfigFile.filePath,
envPath: envConfigFile.filePath
});
expect(config).not.toEqual(null);
expect(config.OCO_API_CUSTOM_HEADERS).toEqual({"Authorization": "Bearer token123", "X-Custom-Header": "test-value"});
// No need to parse JSON again since it's already an object
const parsedHeaders = config.OCO_API_CUSTOM_HEADERS;
expect(parsedHeaders).toHaveProperty('Authorization', 'Bearer token123');
expect(parsedHeaders).toHaveProperty('X-Custom-Header', 'test-value');
expect(parsedHeaders).not.toHaveProperty('X-Global-Header');
});
it('should handle empty local config correctly', async () => {

View File

@@ -1,57 +0,0 @@
import { removeContentTags } from '../../src/utils/removeContentTags';
describe('removeContentTags', () => {
it('should remove content wrapped in specified tags', () => {
const content = 'This is <think>something to hide</think> visible content';
const result = removeContentTags(content, 'think');
expect(result).toBe('This is visible content');
});
it('should handle multiple tag occurrences', () => {
const content = '<think>hidden</think> visible <think>also hidden</think> text';
const result = removeContentTags(content, 'think');
expect(result).toBe('visible text');
});
it('should handle multiline content within tags', () => {
const content = 'Start <think>hidden\nover multiple\nlines</think> End';
const result = removeContentTags(content, 'think');
expect(result).toBe('Start End');
});
it('should return content as is when tag is not found', () => {
const content = 'Content without any tags';
const result = removeContentTags(content, 'think');
expect(result).toBe('Content without any tags');
});
it('should work with different tag names', () => {
const content = 'This is <custom>something to hide</custom> visible content';
const result = removeContentTags(content, 'custom');
expect(result).toBe('This is visible content');
});
it('should handle null content', () => {
const content = null;
const result = removeContentTags(content, 'think');
expect(result).toBe(null);
});
it('should handle undefined content', () => {
const content = undefined;
const result = removeContentTags(content, 'think');
expect(result).toBe(undefined);
});
it('should trim the result', () => {
const content = ' <think>hidden</think> visible ';
const result = removeContentTags(content, 'think');
expect(result).toBe('visible');
});
it('should handle nested tags correctly', () => {
const content = 'Outside <think>Inside <think>Nested</think></think> End';
const result = removeContentTags(content, 'think');
expect(result).toBe('Outside End');
});
});

View File

@@ -3,10 +3,10 @@
"target": "ES2020",
"lib": ["ES6", "ES2020"],
"module": "NodeNext",
"module": "CommonJS",
"resolveJsonModule": true,
"moduleResolution": "NodeNext",
"moduleResolution": "Node",
"allowJs": true,