Compare commits
94 Commits
v3.2.9
...
refactorin
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5fde6dbb63 | ||
|
|
58b9d844b8 | ||
|
|
9855ed1f69 | ||
|
|
7e41139d9c | ||
|
|
66a8c2b52a | ||
|
|
57fb52a3c5 | ||
|
|
88964cbc5e | ||
|
|
cf27085ac9 | ||
|
|
7fa2384761 | ||
|
|
fa1482d8b1 | ||
|
|
f656c39f63 | ||
|
|
420a15343c | ||
|
|
fd9820dd64 | ||
|
|
2d9a26dc37 | ||
|
|
8cbaa36e82 | ||
|
|
42029fff4e | ||
|
|
4d767da9e5 | ||
|
|
361327a8fe | ||
|
|
3a2fa11fcd | ||
|
|
4056bfa547 | ||
|
|
a48d33096a | ||
|
|
d5dcd42d2c | ||
|
|
f300b5dd4e | ||
|
|
15884724e6 | ||
|
|
0b6fda1c2b | ||
|
|
a7fd0d8237 | ||
|
|
6cb67e5150 | ||
|
|
62129503b3 | ||
|
|
f81e836f34 | ||
|
|
c3d1fb379f | ||
|
|
e17294abc7 | ||
|
|
789b4f5e9f | ||
|
|
a9c9bcfd5a | ||
|
|
0ee82f7430 | ||
|
|
9923dab532 | ||
|
|
f74ba2dfc6 | ||
|
|
53414438d1 | ||
|
|
6982e76cf5 | ||
|
|
dc7f7f6552 | ||
|
|
db8a22b0cb | ||
|
|
e27007b6fe | ||
|
|
f51393e37a | ||
|
|
83f9193749 | ||
|
|
bc608e97bd | ||
|
|
40182f26b3 | ||
|
|
62d56a5278 | ||
|
|
9e601ca6b5 | ||
|
|
4a9b1391a3 | ||
|
|
3fe71c1d23 | ||
|
|
2f2e888098 | ||
|
|
4fc8284b87 | ||
|
|
689f52b22f | ||
|
|
de5d5cbb95 | ||
|
|
ccc227ed85 | ||
|
|
9ca7c02840 | ||
|
|
6d9fff56aa | ||
|
|
6ed70d0382 | ||
|
|
5b241ed2d0 | ||
|
|
8b0ee25923 | ||
|
|
fdd4d89bba | ||
|
|
d70797b864 | ||
|
|
74fff2861b | ||
|
|
a0dc1c87c5 | ||
|
|
d65547dcaa | ||
|
|
ebbaff0628 | ||
|
|
4f164a31d1 | ||
|
|
a70a2b8a9f | ||
|
|
52bb719f4e | ||
|
|
c904a78cd9 | ||
|
|
22077399fd | ||
|
|
8ae2f7ddf1 | ||
|
|
b318d1d882 | ||
|
|
af0f2c1df4 | ||
|
|
c5ce50aaa3 | ||
|
|
c1756b85af | ||
|
|
dac1271782 | ||
|
|
1cc7a64f99 | ||
|
|
4deb7bca65 | ||
|
|
1a90485a10 | ||
|
|
48b8d9d7b2 | ||
|
|
7e60c68ba5 | ||
|
|
24adc16adf | ||
|
|
881f07eebe | ||
|
|
3a255a3ad9 | ||
|
|
9971b3c74e | ||
|
|
66a5695d89 | ||
|
|
fd22f713ed | ||
|
|
43dc5e6c2b | ||
|
|
3d42dde48c | ||
|
|
19f32ca57d | ||
|
|
c1070789fd | ||
|
|
1f0f44ede0 | ||
|
|
45aed936b1 | ||
|
|
e4f7e8dc80 |
@@ -1,33 +0,0 @@
|
||||
{
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:prettier/recommended"
|
||||
],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 12,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": ["simple-import-sort", "import", "@typescript-eslint", "prettier"],
|
||||
"settings": {
|
||||
"import/resolver": {
|
||||
"node": {
|
||||
"extensions": [".js", ".jsx", ".ts", ".tsx"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"packageManager": "npm",
|
||||
"rules": {
|
||||
"prettier/prettier": "error",
|
||||
"no-console": "error",
|
||||
"import/order": "off",
|
||||
"sort-imports": "off",
|
||||
"simple-import-sort/imports": "error",
|
||||
"simple-import-sort/exports": "error",
|
||||
"import/first": "error",
|
||||
"import/newline-after-import": "error",
|
||||
"import/no-duplicates": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "off"
|
||||
}
|
||||
}
|
||||
10
.github/ISSUE_TEMPLATE/bug.yaml
vendored
@@ -1,7 +1,7 @@
|
||||
name: 🐞 Bug Report
|
||||
description: File a bug report
|
||||
title: "[Bug]: "
|
||||
labels: ["bug", "triage"]
|
||||
title: '[Bug]: '
|
||||
labels: ['bug', 'triage']
|
||||
assignees:
|
||||
- octocat
|
||||
body:
|
||||
@@ -48,7 +48,7 @@ body:
|
||||
label: What happened?
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you see!
|
||||
value: "A bug happened!"
|
||||
value: 'A bug happened!'
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
@@ -58,7 +58,7 @@ body:
|
||||
description: Also tell us, what did you expect to happen?
|
||||
placeholder: Tell us what you expected to happen!
|
||||
validations:
|
||||
required: true
|
||||
required: true
|
||||
- type: textarea
|
||||
id: current-behavior
|
||||
attributes:
|
||||
@@ -88,4 +88,4 @@ body:
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
render: shell
|
||||
|
||||
6
.github/ISSUE_TEMPLATE/featureRequest.yaml
vendored
@@ -1,9 +1,9 @@
|
||||
---
|
||||
name: 🛠️ Feature Request
|
||||
description: Suggest an idea to help us improve Opencommit
|
||||
title: "[Feature]: "
|
||||
title: '[Feature]: '
|
||||
labels:
|
||||
- "feature_request"
|
||||
- 'feature_request'
|
||||
|
||||
body:
|
||||
- type: markdown
|
||||
@@ -45,4 +45,4 @@ body:
|
||||
description: |
|
||||
Add any other context about the problem here.
|
||||
validations:
|
||||
required: false
|
||||
required: false
|
||||
|
||||
BIN
.github/github-mark-white.png
vendored
|
Before Width: | Height: | Size: 4.7 KiB After Width: | Height: | Size: 2.8 KiB |
BIN
.github/logo-black.png
vendored
|
Before Width: | Height: | Size: 2.1 KiB After Width: | Height: | Size: 1.7 KiB |
14
.github/logo-grad.svg
vendored
@@ -1,13 +1 @@
|
||||
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 
74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z" fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#D33075"/>
|
||||
<stop offset="1" stop-color="#6157D8"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#D33075"/>
|
||||
<stop offset="1" stop-color="#6157D8"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)" d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 
63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z"/><defs><linearGradient id="paint0_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient><linearGradient id="paint1_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient></defs></svg>
|
||||
|
Before Width: | Height: | Size: 3.2 KiB After Width: | Height: | Size: 3.2 KiB |
5
.github/logo.svg
vendored
@@ -1,4 +1 @@
|
||||
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z" fill="black"/>
|
||||
<path d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z" fill="black"/>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="#000" d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z"/><path fill="#000" d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z"/></svg>
|
||||
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
BIN
.github/opencommit-example.png
vendored
|
Before Width: | Height: | Size: 304 KiB After Width: | Height: | Size: 237 KiB |
63
.github/workflows/codeql.yml
vendored
@@ -9,14 +9,14 @@
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
name: 'CodeQL'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "master" ]
|
||||
branches: ['master']
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ "master" ]
|
||||
branches: ['master']
|
||||
schedule:
|
||||
- cron: '21 16 * * 0'
|
||||
|
||||
@@ -32,45 +32,44 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'javascript' ]
|
||||
language: ['javascript']
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Use only 'java' to analyze code written in Java, Kotlin or both
|
||||
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: '/language:${{matrix.language}}'
|
||||
|
||||
75
.github/workflows/test.yml
vendored
@@ -8,46 +8,41 @@ on:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
unit-test:
|
||||
linux-tests:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Run Unit Tests
|
||||
run: npm run test:unit
|
||||
e2e-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.email "test@example.com"
|
||||
git config --global user.name "Test User"
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
- name: Run Lint
|
||||
run: npm run lint
|
||||
- name: Run Format Check
|
||||
run: npm run format:check
|
||||
- name: Run Unit Tests
|
||||
run: npm run test:unit
|
||||
- name: Run Core E2E Tests
|
||||
run: npm run test:e2e:core
|
||||
- name: Run Prompt Module E2E Tests
|
||||
run: npm run test:e2e:prompt-module
|
||||
macos-smoke:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
- name: Install git
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y git
|
||||
git --version
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.email "test@example.com"
|
||||
git config --global user.name "Test User"
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Build
|
||||
run: npm run build
|
||||
- name: Run E2E Tests
|
||||
run: npm run test:e2e
|
||||
- uses: actions/checkout@v4
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
- name: Run Smoke E2E Tests
|
||||
run: npm run test:e2e:smoke
|
||||
|
||||
1
.gitignore
vendored
@@ -1,4 +1,5 @@
|
||||
node_modules/
|
||||
out/
|
||||
coverage/
|
||||
temp/
|
||||
build/
|
||||
|
||||
56
README.md
@@ -74,6 +74,22 @@ oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
|
||||
|
||||
where 192.168.1.10 is example of endpoint URL, where you have ollama set up.
|
||||
|
||||
#### Troubleshooting Ollama IPv6/IPv4 Connection Fix
|
||||
|
||||
If you encounter issues with Ollama, such as the error
|
||||
|
||||
```sh
|
||||
✖ local model issues. details: connect ECONNREFUSED ::1:11434
|
||||
```
|
||||
|
||||
It's likely because Ollama is not listening on IPv6 by default. To fix this, you can set the OLLAMA_HOST environment variable to 0.0.0.0 before starting Ollama:
|
||||
|
||||
```bash
|
||||
export OLLAMA_HOST=0.0.0.0
|
||||
```
|
||||
|
||||
This will make Ollama listen on all interfaces, including IPv6 and IPv4, resolving the connection issue. You can add this line to your shell configuration file (like `.bashrc` or `.zshrc`) to make it persistent across sessions.
|
||||
|
||||
### Flags
|
||||
|
||||
There are multiple optional flags that can be used with the `oco` command:
|
||||
@@ -106,7 +122,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
|
||||
|
||||
```env
|
||||
...
|
||||
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek>
|
||||
OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi>
|
||||
OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
|
||||
OCO_API_URL=<may be used to set proxy path to OpenAI api>
|
||||
OCO_API_CUSTOM_HEADERS=<JSON string of custom HTTP headers to include in API requests>
|
||||
@@ -185,6 +201,28 @@ or for as a cheaper option:
|
||||
oco config set OCO_MODEL=gpt-3.5-turbo
|
||||
```
|
||||
|
||||
### Model Management
|
||||
|
||||
OpenCommit automatically fetches available models from your provider when you run `oco setup`. Models are cached for 7 days to reduce API calls.
|
||||
|
||||
To see available models for your current provider:
|
||||
|
||||
```sh
|
||||
oco models
|
||||
```
|
||||
|
||||
To refresh the model list (e.g., after new models are released):
|
||||
|
||||
```sh
|
||||
oco models --refresh
|
||||
```
|
||||
|
||||
To see models for a specific provider:
|
||||
|
||||
```sh
|
||||
oco models --provider anthropic
|
||||
```
|
||||
|
||||
### Switch to other LLM providers with a custom URL
|
||||
|
||||
By default OpenCommit uses [OpenAI](https://openai.com).
|
||||
@@ -199,6 +237,22 @@ oco config set OCO_AI_PROVIDER=flowise OCO_API_KEY=<your_flowise_api_key> OCO_AP
|
||||
oco config set OCO_AI_PROVIDER=ollama OCO_API_KEY=<your_ollama_api_key> OCO_API_URL=<your_ollama_endpoint>
|
||||
```
|
||||
|
||||
### Use with Proxy
|
||||
|
||||
If you are behind a proxy, you can set it in the config:
|
||||
|
||||
```sh
|
||||
oco config set OCO_PROXY=http://127.0.0.1:7890
|
||||
```
|
||||
|
||||
If `OCO_PROXY` is unset, OpenCommit will automatically use `HTTPS_PROXY` or `HTTP_PROXY` environment variables.
|
||||
|
||||
To explicitly disable proxy use for OpenCommit, even when those environment variables are set:
|
||||
|
||||
```sh
|
||||
oco config set OCO_PROXY=null
|
||||
```
|
||||
|
||||
### Locale configuration
|
||||
|
||||
To globally specify the language used to generate commit messages:
|
||||
|
||||
57
biome.json
Normal file
@@ -0,0 +1,57 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/2.4.11/schema.json",
|
||||
|
||||
"vcs": {
|
||||
"enabled": true,
|
||||
"clientKind": "git",
|
||||
"useIgnoreFile": true
|
||||
},
|
||||
|
||||
"files": {
|
||||
"ignoreUnknown": true,
|
||||
"includes": ["**", "!!build", "!!dist", "!!out"]
|
||||
},
|
||||
|
||||
"formatter": {
|
||||
"enabled": false,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2,
|
||||
"lineEnding": "lf"
|
||||
},
|
||||
|
||||
"javascript": {
|
||||
"formatter": {
|
||||
"quoteStyle": "single",
|
||||
"jsxQuoteStyle": "double",
|
||||
"trailingCommas": "none",
|
||||
"semicolons": "always"
|
||||
}
|
||||
},
|
||||
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"suspicious": {
|
||||
"noConsole": "off",
|
||||
"noImplicitAnyLet": "off",
|
||||
"useIterableCallbackReturn": "off"
|
||||
},
|
||||
"correctness": {
|
||||
"noSwitchDeclarations": "off"
|
||||
},
|
||||
"style": {
|
||||
"noNonNullAssertion": "off"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"assist": {
|
||||
"enabled": false,
|
||||
"actions": {
|
||||
"source": {
|
||||
"organizeImports": "off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -15,11 +15,9 @@ const config: Config = {
|
||||
testRegex: ['.*\\.test\\.ts$'],
|
||||
// Tell Jest to ignore the specific duplicate package.json files
|
||||
// that are causing Haste module naming collisions
|
||||
modulePathIgnorePatterns: [
|
||||
'<rootDir>/test/e2e/prompt-module/data/'
|
||||
],
|
||||
modulePathIgnorePatterns: ['<rootDir>/test/e2e/prompt-module/data/'],
|
||||
transformIgnorePatterns: [
|
||||
'node_modules/(?!(cli-testing-library|@clack|cleye)/.*)'
|
||||
'node_modules/(?!(cli-testing-library|@clack|cleye|chalk)/.*)'
|
||||
],
|
||||
transform: {
|
||||
'^.+\\.(ts|tsx|js|jsx|mjs)$': [
|
||||
|
||||
24053
out/cli.cjs
87722
out/github-action.cjs
1474
package-lock.json
generated
28
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "opencommit",
|
||||
"version": "3.2.9",
|
||||
"version": "3.2.19",
|
||||
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
|
||||
"keywords": [
|
||||
"git",
|
||||
@@ -49,30 +49,37 @@
|
||||
"deploy": "npm publish --tag latest",
|
||||
"deploy:build": "npm run build:push && git push --tags && npm run deploy",
|
||||
"deploy:patch": "npm version patch && npm run deploy:build",
|
||||
"lint": "eslint src --ext ts && tsc --noEmit",
|
||||
"format": "prettier --write src",
|
||||
"lint": "biome check . --diagnostic-level=error && tsc --noEmit",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"format": "prettier --write \"src/**/*.{ts,js,json,md}\" \"test/**/*.{ts,js,json,md}\" \".github/**/*.{yml,yaml}\" \"*.{js,json,ts,md,yml,yaml}\"",
|
||||
"format:check": "prettier --check \"src/**/*.{ts,js,json,md}\" \"test/**/*.{ts,js,json,md}\" \".github/**/*.{yml,yaml}\" \"*.{js,json,ts,md,yml,yaml}\"",
|
||||
"test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
|
||||
"test:all": "npm run test:unit:docker && npm run test:e2e:docker",
|
||||
"test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
|
||||
"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest test/unit",
|
||||
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
|
||||
"test:e2e": "npm run test:e2e:setup && jest test/e2e",
|
||||
"test:e2e:setup": "sh test/e2e/setup.sh",
|
||||
"test:e2e": "npm run build && npm run test:e2e:smoke:run && npm run test:e2e:core:run && npm run test:e2e:prompt-module:run",
|
||||
"test:e2e:smoke": "npm run build && npm run test:e2e:smoke:run",
|
||||
"test:e2e:smoke:run": "OCO_TEST_SKIP_VERSION_CHECK=true jest test/e2e/smoke.test.ts",
|
||||
"test:e2e:core": "npm run build && npm run test:e2e:core:run",
|
||||
"test:e2e:core:run": "OCO_TEST_SKIP_VERSION_CHECK=true jest test/e2e/cliBehavior.test.ts test/e2e/geminiBehavior.test.ts test/e2e/gitPush.test.ts test/e2e/oneFile.test.ts test/e2e/noChanges.test.ts",
|
||||
"test:e2e:setup": "npm run test:e2e:prompt-module:setup",
|
||||
"test:e2e:prompt-module:setup": "sh test/e2e/setup.sh",
|
||||
"test:e2e:prompt-module": "npm run build && npm run test:e2e:prompt-module:run",
|
||||
"test:e2e:prompt-module:run": "npm run test:e2e:prompt-module:setup && OCO_TEST_SKIP_VERSION_CHECK=true jest test/e2e/prompt-module",
|
||||
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e",
|
||||
"mlx:start": "OCO_AI_PROVIDER='mlx' node ./out/cli.cjs"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.4.11",
|
||||
"@commitlint/types": "^17.4.4",
|
||||
"@types/ini": "^1.3.31",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/node": "^16.18.14",
|
||||
"@typescript-eslint/eslint-plugin": "^8.29.0",
|
||||
"@typescript-eslint/parser": "^8.29.0",
|
||||
"cli-testing-library": "^2.0.2",
|
||||
"dotenv": "^16.0.3",
|
||||
"esbuild": "^0.25.5",
|
||||
"eslint": "^9.24.0",
|
||||
"jest": "^29.7.0",
|
||||
"prettier": "^2.8.4",
|
||||
"rimraf": "^6.0.1",
|
||||
@@ -88,15 +95,16 @@
|
||||
"@azure/openai": "^1.0.0-beta.12",
|
||||
"@clack/prompts": "^0.6.1",
|
||||
"@dqbd/tiktoken": "^1.0.2",
|
||||
"@google/generative-ai": "^0.11.4",
|
||||
"@google/generative-ai": "^0.24.1",
|
||||
"@mistralai/mistralai": "^1.3.5",
|
||||
"@octokit/webhooks-schemas": "^6.11.0",
|
||||
"@octokit/webhooks-types": "^6.11.0",
|
||||
"axios": "^1.3.4",
|
||||
"axios": "1.9.0",
|
||||
"chalk": "^5.2.0",
|
||||
"cleye": "^1.3.2",
|
||||
"crypto": "^1.0.1",
|
||||
"execa": "^7.0.0",
|
||||
"https-proxy-agent": "^8.0.0",
|
||||
"ignore": "^5.2.4",
|
||||
"ini": "^3.0.1",
|
||||
"inquirer": "^9.1.4",
|
||||
|
||||
75
src/cli.ts
@@ -5,19 +5,63 @@ import { cli } from 'cleye';
|
||||
import packageJSON from '../package.json';
|
||||
import { commit } from './commands/commit';
|
||||
import { commitlintConfigCommand } from './commands/commitlint';
|
||||
import { configCommand } from './commands/config';
|
||||
import { configCommand, getConfig } from './commands/config';
|
||||
import { hookCommand, isHookCalled } from './commands/githook.js';
|
||||
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
|
||||
import { resolveProxy, setupProxy } from './utils/proxy';
|
||||
import {
|
||||
setupCommand,
|
||||
isFirstRun,
|
||||
runSetup,
|
||||
promptForMissingApiKey
|
||||
} from './commands/setup';
|
||||
import { modelsCommand } from './commands/models';
|
||||
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
|
||||
import { runMigrations } from './migrations/_run.js';
|
||||
|
||||
const extraArgs = process.argv.slice(2);
|
||||
const config = getConfig();
|
||||
setupProxy(resolveProxy(config.OCO_PROXY));
|
||||
|
||||
const OCO_FLAGS_WITH_VALUE = new Set(['-c', '--context']);
|
||||
const OCO_BOOLEAN_FLAGS = new Set(['-y', '--yes', '--fgm']);
|
||||
const OCO_EQUALS_PREFIXES = ['-c=', '--context=', '-y=', '--yes=', '--fgm='];
|
||||
|
||||
const stripOcoFlags = (argv: string[]): string[] => {
|
||||
const out: string[] = [];
|
||||
for (let i = 0; i < argv.length; i++) {
|
||||
const a = argv[i];
|
||||
// String flags with a separate value token: -c <val>, --context <val>
|
||||
if (OCO_FLAGS_WITH_VALUE.has(a)) {
|
||||
i++; // skip the value token too
|
||||
continue;
|
||||
}
|
||||
// Boolean flags: -y, --yes, --fgm
|
||||
if (OCO_BOOLEAN_FLAGS.has(a)) {
|
||||
continue;
|
||||
}
|
||||
// Equals form: -c=…, --context=…, -y=…, --yes=…, --fgm=…
|
||||
if (OCO_EQUALS_PREFIXES.some((prefix) => a.startsWith(prefix))) {
|
||||
continue;
|
||||
}
|
||||
out.push(a);
|
||||
}
|
||||
return out;
|
||||
};
|
||||
|
||||
const rawArgv = process.argv.slice(2);
|
||||
const extraArgs = stripOcoFlags(rawArgv);
|
||||
|
||||
cli(
|
||||
{
|
||||
version: packageJSON.version,
|
||||
name: 'opencommit',
|
||||
commands: [configCommand, hookCommand, commitlintConfigCommand],
|
||||
commands: [
|
||||
configCommand,
|
||||
hookCommand,
|
||||
commitlintConfigCommand,
|
||||
setupCommand,
|
||||
modelsCommand
|
||||
],
|
||||
flags: {
|
||||
fgm: {
|
||||
type: Boolean,
|
||||
@@ -41,14 +85,29 @@ cli(
|
||||
help: { description: packageJSON.description }
|
||||
},
|
||||
async ({ flags }) => {
|
||||
if (await isHookCalled()) {
|
||||
await prepareCommitMessageHook();
|
||||
return;
|
||||
}
|
||||
|
||||
await runMigrations();
|
||||
await checkIsLatestVersion();
|
||||
|
||||
if (await isHookCalled()) {
|
||||
prepareCommitMessageHook();
|
||||
} else {
|
||||
commit(extraArgs, flags.context, false, flags.fgm, flags.yes);
|
||||
// Check for first run and trigger setup wizard
|
||||
if (isFirstRun()) {
|
||||
const setupComplete = await runSetup();
|
||||
if (!setupComplete) {
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Check for missing API key and prompt if needed
|
||||
const hasApiKey = await promptForMissingApiKey();
|
||||
if (!hasApiKey) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
commit(extraArgs, flags.context, false, flags.fgm, flags.yes);
|
||||
},
|
||||
extraArgs
|
||||
rawArgv
|
||||
);
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
export enum COMMANDS {
|
||||
config = 'config',
|
||||
hook = 'hook',
|
||||
commitlint = 'commitlint'
|
||||
commitlint = 'commitlint',
|
||||
setup = 'setup',
|
||||
models = 'models'
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import {
|
||||
text,
|
||||
confirm,
|
||||
intro,
|
||||
isCancel,
|
||||
@@ -10,6 +11,7 @@ import {
|
||||
import chalk from 'chalk';
|
||||
import { execa } from 'execa';
|
||||
import { generateCommitMessageByDiff } from '../generateCommitMessageFromGitDiff';
|
||||
import { formatUserFriendlyError, printFormattedError } from '../utils/errors';
|
||||
import {
|
||||
assertGitRepo,
|
||||
getChangedFiles,
|
||||
@@ -27,6 +29,32 @@ const getGitRemotes = async () => {
|
||||
return stdout.split('\n').filter((remote) => Boolean(remote.trim()));
|
||||
};
|
||||
|
||||
const hasUpstreamBranch = async (): Promise<boolean> => {
|
||||
try {
|
||||
await execa('git', [
|
||||
'rev-parse',
|
||||
'--abbrev-ref',
|
||||
'--symbolic-full-name',
|
||||
'@{u}'
|
||||
]);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
const getCurrentBranch = async (): Promise<string> => {
|
||||
const { stdout } = await execa('git', ['branch', '--show-current']);
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
const displayPushUrl = (stderr: string) => {
|
||||
const urlMatch = stderr.match(/https?:\/\/\S+/);
|
||||
if (urlMatch) {
|
||||
outro(`${chalk.cyan('Create a pull request:')} ${urlMatch[0]}`);
|
||||
}
|
||||
};
|
||||
|
||||
// Check for the presence of message templates
|
||||
const checkMessageTemplate = (extraArgs: string[]): string | false => {
|
||||
for (const key in extraArgs) {
|
||||
@@ -85,15 +113,29 @@ ${commitMessage}
|
||||
${chalk.grey('——————————————————')}`
|
||||
);
|
||||
|
||||
const isCommitConfirmedByUser =
|
||||
skipCommitConfirmation ||
|
||||
(await confirm({
|
||||
message: 'Confirm the commit message?'
|
||||
}));
|
||||
const userAction = skipCommitConfirmation
|
||||
? 'Yes'
|
||||
: await select({
|
||||
message: 'Confirm the commit message?',
|
||||
options: [
|
||||
{ value: 'Yes', label: 'Yes' },
|
||||
{ value: 'No', label: 'No' },
|
||||
{ value: 'Edit', label: 'Edit' }
|
||||
]
|
||||
});
|
||||
|
||||
if (isCancel(isCommitConfirmedByUser)) process.exit(1);
|
||||
if (isCancel(userAction)) process.exit(1);
|
||||
|
||||
if (isCommitConfirmedByUser) {
|
||||
if (userAction === 'Edit') {
|
||||
const textResponse = await text({
|
||||
message: 'Please edit the commit message: (press Enter to continue)',
|
||||
initialValue: commitMessage
|
||||
});
|
||||
|
||||
commitMessage = textResponse.toString();
|
||||
}
|
||||
|
||||
if (userAction === 'Yes' || userAction === 'Edit') {
|
||||
const committingChangesSpinner = spinner();
|
||||
committingChangesSpinner.start('Committing the changes');
|
||||
const { stdout } = await execa('git', [
|
||||
@@ -114,8 +156,13 @@ ${chalk.grey('——————————————————')}`
|
||||
if (config.OCO_GITPUSH === false) return;
|
||||
|
||||
if (!remotes.length) {
|
||||
const { stdout } = await execa('git', ['push']);
|
||||
const pushArgs = ['push'];
|
||||
if (!(await hasUpstreamBranch())) {
|
||||
pushArgs.push('--set-upstream', 'origin', await getCurrentBranch());
|
||||
}
|
||||
const { stdout, stderr } = await execa('git', pushArgs);
|
||||
if (stdout) outro(stdout);
|
||||
displayPushUrl(stderr);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
@@ -131,11 +178,11 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
pushSpinner.start(`Running 'git push ${remotes[0]}'`);
|
||||
|
||||
const { stdout } = await execa('git', [
|
||||
'push',
|
||||
'--verbose',
|
||||
remotes[0]
|
||||
]);
|
||||
const pushArgs = ['push', '--verbose', remotes[0]];
|
||||
if (!(await hasUpstreamBranch())) {
|
||||
pushArgs.push('--set-upstream', await getCurrentBranch());
|
||||
}
|
||||
const { stdout, stderr } = await execa('git', pushArgs);
|
||||
|
||||
pushSpinner.stop(
|
||||
`${chalk.green('✔')} Successfully pushed all commits to ${
|
||||
@@ -144,6 +191,7 @@ ${chalk.grey('——————————————————')}`
|
||||
);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
displayPushUrl(stderr);
|
||||
} else {
|
||||
outro('`git push` aborted');
|
||||
process.exit(0);
|
||||
@@ -165,7 +213,11 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
|
||||
|
||||
const { stdout } = await execa('git', ['push', selectedRemote]);
|
||||
const pushArgs = ['push', selectedRemote];
|
||||
if (!(await hasUpstreamBranch())) {
|
||||
pushArgs.push('--set-upstream', await getCurrentBranch());
|
||||
}
|
||||
const { stdout, stderr } = await execa('git', pushArgs);
|
||||
|
||||
if (stdout) outro(stdout);
|
||||
|
||||
@@ -174,6 +226,8 @@ ${chalk.grey('——————————————————')}`
|
||||
'✔'
|
||||
)} successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
|
||||
displayPushUrl(stderr);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -187,7 +241,9 @@ ${chalk.grey('——————————————————')}`
|
||||
await generateCommitMessageFromGitDiff({
|
||||
diff,
|
||||
extraArgs,
|
||||
fullGitMojiSpec
|
||||
context,
|
||||
fullGitMojiSpec,
|
||||
skipCommitConfirmation
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -196,10 +252,13 @@ ${chalk.grey('——————————————————')}`
|
||||
`${chalk.red('✖')} Failed to generate the commit message`
|
||||
);
|
||||
|
||||
console.log(error);
|
||||
const errorConfig = getConfig();
|
||||
const provider = errorConfig.OCO_AI_PROVIDER || 'openai';
|
||||
const formatted = formatUserFriendlyError(error, provider, {
|
||||
baseURL: errorConfig.OCO_API_URL
|
||||
});
|
||||
outro(printFormattedError(formatted));
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -25,9 +25,12 @@ export enum CONFIG_KEYS {
|
||||
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
|
||||
OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
|
||||
OCO_API_URL = 'OCO_API_URL',
|
||||
OCO_PROXY = 'OCO_PROXY',
|
||||
OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
|
||||
OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
|
||||
OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
|
||||
OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT',
|
||||
OCO_OLLAMA_THINK = 'OCO_OLLAMA_THINK'
|
||||
}
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
@@ -67,10 +70,11 @@ export const MODEL_LIST = {
|
||||
],
|
||||
|
||||
anthropic: [
|
||||
'claude-3-5-sonnet-20240620',
|
||||
'claude-3-opus-20240229',
|
||||
'claude-3-sonnet-20240229',
|
||||
'claude-3-haiku-20240307'
|
||||
'claude-sonnet-4-20250514',
|
||||
'claude-opus-4-20250514',
|
||||
'claude-3-7-sonnet-20250219',
|
||||
'claude-3-5-sonnet-20241022',
|
||||
'claude-3-5-haiku-20241022'
|
||||
],
|
||||
|
||||
gemini: [
|
||||
@@ -132,9 +136,113 @@ export const MODEL_LIST = {
|
||||
'mistral-moderation-2411',
|
||||
'mistral-moderation-latest'
|
||||
],
|
||||
|
||||
deepseek: ['deepseek-chat', 'deepseek-reasoner'],
|
||||
|
||||
// AI/ML API available chat-completion models
|
||||
// https://api.aimlapi.com/v1/models
|
||||
aimlapi: [
|
||||
'openai/gpt-4o',
|
||||
'gpt-4o-2024-08-06',
|
||||
'gpt-4o-2024-05-13',
|
||||
'gpt-4o-mini',
|
||||
'gpt-4o-mini-2024-07-18',
|
||||
'chatgpt-4o-latest',
|
||||
'gpt-4-turbo',
|
||||
'gpt-4-turbo-2024-04-09',
|
||||
'gpt-4',
|
||||
'gpt-4-0125-preview',
|
||||
'gpt-4-1106-preview',
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-0125',
|
||||
'gpt-3.5-turbo-1106',
|
||||
'o1-preview',
|
||||
'o1-preview-2024-09-12',
|
||||
'o1-mini',
|
||||
'o1-mini-2024-09-12',
|
||||
'o3-mini',
|
||||
'gpt-4o-audio-preview',
|
||||
'gpt-4o-mini-audio-preview',
|
||||
'gpt-4o-search-preview',
|
||||
'gpt-4o-mini-search-preview',
|
||||
'openai/gpt-4.1-2025-04-14',
|
||||
'openai/gpt-4.1-mini-2025-04-14',
|
||||
'openai/gpt-4.1-nano-2025-04-14',
|
||||
'openai/o4-mini-2025-04-16',
|
||||
'openai/o3-2025-04-16',
|
||||
'o1',
|
||||
'openai/o3-pro',
|
||||
'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
|
||||
'google/gemma-2-27b-it',
|
||||
'meta-llama/Llama-Vision-Free',
|
||||
'Qwen/Qwen2-72B-Instruct',
|
||||
'mistralai/Mixtral-8x7B-Instruct-v0.1',
|
||||
'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
|
||||
'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
|
||||
'meta-llama/Llama-3.3-70B-Instruct-Turbo',
|
||||
'meta-llama/Llama-3.2-3B-Instruct-Turbo',
|
||||
'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
|
||||
'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
|
||||
'Qwen/Qwen2.5-7B-Instruct-Turbo',
|
||||
'Qwen/Qwen2.5-Coder-32B-Instruct',
|
||||
'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
|
||||
'meta-llama/Llama-3-8b-chat-hf',
|
||||
'meta-llama/Llama-3-70b-chat-hf',
|
||||
'Qwen/Qwen2.5-72B-Instruct-Turbo',
|
||||
'Qwen/QwQ-32B',
|
||||
'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
|
||||
'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
|
||||
'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
|
||||
'mistralai/Mistral-7B-Instruct-v0.2',
|
||||
'meta-llama/LlamaGuard-2-8b',
|
||||
'mistralai/Mistral-7B-Instruct-v0.1',
|
||||
'mistralai/Mistral-7B-Instruct-v0.3',
|
||||
'meta-llama/Meta-Llama-Guard-3-8B',
|
||||
'meta-llama/llama-4-scout',
|
||||
'meta-llama/llama-4-maverick',
|
||||
'Qwen/Qwen3-235B-A22B-fp8-tput',
|
||||
'claude-3-opus-20240229',
|
||||
'claude-3-haiku-20240307',
|
||||
'claude-3-5-sonnet-20240620',
|
||||
'claude-3-5-sonnet-20241022',
|
||||
'claude-3-5-haiku-20241022',
|
||||
'claude-3-7-sonnet-20250219',
|
||||
'claude-sonnet-4-20250514',
|
||||
'claude-opus-4-20250514',
|
||||
'google/gemini-2.0-flash-exp',
|
||||
'google/gemini-2.0-flash',
|
||||
'google/gemini-2.5-pro',
|
||||
'google/gemini-2.5-flash',
|
||||
'deepseek-chat',
|
||||
'deepseek-reasoner',
|
||||
'qwen-max',
|
||||
'qwen-plus',
|
||||
'qwen-turbo',
|
||||
'qwen-max-2025-01-25',
|
||||
'mistralai/mistral-tiny',
|
||||
'mistralai/mistral-nemo',
|
||||
'anthracite-org/magnum-v4-72b',
|
||||
'nvidia/llama-3.1-nemotron-70b-instruct',
|
||||
'cohere/command-r-plus',
|
||||
'mistralai/codestral-2501',
|
||||
'google/gemma-3-4b-it',
|
||||
'google/gemma-3-12b-it',
|
||||
'google/gemma-3-27b-it',
|
||||
'google/gemini-2.5-flash-lite-preview',
|
||||
'deepseek/deepseek-prover-v2',
|
||||
'google/gemma-3n-e4b-it',
|
||||
'cohere/command-a',
|
||||
'MiniMax-Text-01',
|
||||
'abab6.5s-chat',
|
||||
'minimax/m1',
|
||||
'bagoodex/bagoodex-search-v1',
|
||||
'moonshot/kimi-k2-preview',
|
||||
'perplexity/sonar',
|
||||
'perplexity/sonar-pro',
|
||||
'x-ai/grok-4-07-09',
|
||||
'x-ai/grok-3-beta',
|
||||
'x-ai/grok-3-mini-beta'
|
||||
],
|
||||
|
||||
// OpenRouter available models
|
||||
// input_modalities: 'text'
|
||||
// output_modalities: 'text'
|
||||
@@ -483,6 +591,8 @@ const getDefaultModel = (provider: string | undefined): string => {
|
||||
return MODEL_LIST.mistral[0];
|
||||
case 'deepseek':
|
||||
return MODEL_LIST.deepseek[0];
|
||||
case 'aimlapi':
|
||||
return MODEL_LIST.aimlapi[0];
|
||||
case 'openrouter':
|
||||
return MODEL_LIST.openrouter[0];
|
||||
default:
|
||||
@@ -613,7 +723,17 @@ export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_API_URL](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_API_URL,
|
||||
typeof value === 'string',
|
||||
typeof value === 'string' && /^(https?:\/\/)/.test(value),
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_PROXY](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_PROXY,
|
||||
value === null ||
|
||||
(typeof value === 'string' && /^(https?:\/\/)/.test(value)),
|
||||
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
|
||||
);
|
||||
return value;
|
||||
@@ -675,9 +795,10 @@ export const configValidators = {
|
||||
'flowise',
|
||||
'groq',
|
||||
'deepseek',
|
||||
'aimlapi',
|
||||
'openrouter'
|
||||
].includes(value) || value.startsWith('ollama'),
|
||||
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
|
||||
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -711,6 +832,23 @@ export const configValidators = {
|
||||
'Must be true or false'
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_OLLAMA_THINK](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OLLAMA_THINK,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
return value;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -726,14 +864,44 @@ export enum OCO_AI_PROVIDER_ENUM {
|
||||
MISTRAL = 'mistral',
|
||||
MLX = 'mlx',
|
||||
DEEPSEEK = 'deepseek',
|
||||
AIMLAPI = 'aimlapi',
|
||||
OPENROUTER = 'openrouter'
|
||||
}
|
||||
|
||||
export const PROVIDER_API_KEY_URLS: Record<string, string | null> = {
|
||||
[OCO_AI_PROVIDER_ENUM.OPENAI]: 'https://platform.openai.com/api-keys',
|
||||
[OCO_AI_PROVIDER_ENUM.ANTHROPIC]:
|
||||
'https://console.anthropic.com/settings/keys',
|
||||
[OCO_AI_PROVIDER_ENUM.GEMINI]: 'https://aistudio.google.com/app/apikey',
|
||||
[OCO_AI_PROVIDER_ENUM.GROQ]: 'https://console.groq.com/keys',
|
||||
[OCO_AI_PROVIDER_ENUM.MISTRAL]: 'https://console.mistral.ai/api-keys/',
|
||||
[OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'https://platform.deepseek.com/api_keys',
|
||||
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'https://openrouter.ai/keys',
|
||||
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'https://aimlapi.com/app/keys',
|
||||
[OCO_AI_PROVIDER_ENUM.AZURE]: 'https://portal.azure.com/',
|
||||
[OCO_AI_PROVIDER_ENUM.OLLAMA]: null,
|
||||
[OCO_AI_PROVIDER_ENUM.MLX]: null,
|
||||
[OCO_AI_PROVIDER_ENUM.FLOWISE]: null,
|
||||
[OCO_AI_PROVIDER_ENUM.TEST]: null
|
||||
};
|
||||
|
||||
export const RECOMMENDED_MODELS: Record<string, string> = {
|
||||
[OCO_AI_PROVIDER_ENUM.OPENAI]: 'gpt-4o-mini',
|
||||
[OCO_AI_PROVIDER_ENUM.ANTHROPIC]: 'claude-sonnet-4-20250514',
|
||||
[OCO_AI_PROVIDER_ENUM.GEMINI]: 'gemini-1.5-flash',
|
||||
[OCO_AI_PROVIDER_ENUM.GROQ]: 'llama3-70b-8192',
|
||||
[OCO_AI_PROVIDER_ENUM.MISTRAL]: 'mistral-small-latest',
|
||||
[OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'deepseek-chat',
|
||||
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'openai/gpt-4o-mini',
|
||||
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'gpt-4o-mini'
|
||||
};
|
||||
|
||||
export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_API_KEY]?: string;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
|
||||
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
|
||||
[CONFIG_KEYS.OCO_API_URL]?: string;
|
||||
[CONFIG_KEYS.OCO_PROXY]?: string | null;
|
||||
[CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
|
||||
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
|
||||
[CONFIG_KEYS.OCO_EMOJI]: boolean;
|
||||
@@ -747,6 +915,8 @@ export type ConfigType = {
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
|
||||
[CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
|
||||
[CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
|
||||
[CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
|
||||
[CONFIG_KEYS.OCO_OLLAMA_THINK]?: boolean;
|
||||
};
|
||||
|
||||
export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
@@ -794,7 +964,8 @@ export const DEFAULT_CONFIG = {
|
||||
OCO_TEST_MOCK_TYPE: 'commit-message',
|
||||
OCO_WHY: false,
|
||||
OCO_OMIT_SCOPE: false,
|
||||
OCO_GITPUSH: true // todo: deprecate
|
||||
OCO_GITPUSH: true, // todo: deprecate
|
||||
OCO_HOOK_AUTO_UNCOMMENT: false
|
||||
};
|
||||
|
||||
const initGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
@@ -816,6 +987,7 @@ const getEnvConfig = (envPath: string) => {
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
OCO_API_URL: process.env.OCO_API_URL,
|
||||
OCO_PROXY: process.env.OCO_PROXY,
|
||||
OCO_API_KEY: process.env.OCO_API_KEY,
|
||||
OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
|
||||
@@ -853,16 +1025,13 @@ export const getIsGlobalConfigFileExist = (
|
||||
};
|
||||
|
||||
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
let globalConfig: ConfigType;
|
||||
|
||||
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
|
||||
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
globalConfig = iniParse(configFile) as ConfigType;
|
||||
if (!isGlobalConfigFileExist) {
|
||||
return { ...DEFAULT_CONFIG };
|
||||
}
|
||||
|
||||
return globalConfig;
|
||||
const configFile = readFileSync(configPath, 'utf8');
|
||||
return iniParse(configFile) as ConfigType;
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -875,7 +1044,10 @@ export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
|
||||
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => {
|
||||
const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]);
|
||||
return Array.from(allKeys).reduce((acc, key) => {
|
||||
acc[key] = parseConfigVarValue(main[key] ?? fallback[key]);
|
||||
const mainValue = main[key];
|
||||
acc[key] = parseConfigVarValue(
|
||||
mainValue !== undefined ? mainValue : fallback[key]
|
||||
);
|
||||
return acc;
|
||||
}, {} as ConfigType);
|
||||
};
|
||||
@@ -1041,11 +1213,24 @@ function getConfigKeyDetails(key) {
|
||||
'Custom API URL - may be used to set proxy path to OpenAI API',
|
||||
values: ["URL string (must start with 'http://' or 'https://')"]
|
||||
};
|
||||
case CONFIG_KEYS.OCO_PROXY:
|
||||
return {
|
||||
description: 'HTTP/HTTPS Proxy URL',
|
||||
values: [
|
||||
"URL string (must start with 'http://' or 'https://')",
|
||||
'null (disable proxy even when HTTP_PROXY/HTTPS_PROXY are set)'
|
||||
]
|
||||
};
|
||||
case CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
return {
|
||||
description: 'Message template placeholder',
|
||||
values: ['String (must start with $)']
|
||||
};
|
||||
case CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT:
|
||||
return {
|
||||
description: 'Automatically uncomment the commit message in the hook',
|
||||
values: ['true', 'false']
|
||||
};
|
||||
default:
|
||||
return {
|
||||
description: 'String value',
|
||||
|
||||
155
src/commands/models.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import { intro, outro, spinner } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM, getConfig } from './config';
|
||||
import {
|
||||
fetchModelsForProvider,
|
||||
clearModelCache,
|
||||
getCacheInfo,
|
||||
getCachedModels
|
||||
} from '../utils/modelCache';
|
||||
|
||||
function formatCacheAge(timestamp: number | null): string {
|
||||
if (!timestamp) return 'never';
|
||||
const ageMs = Date.now() - timestamp;
|
||||
const days = Math.floor(ageMs / (1000 * 60 * 60 * 24));
|
||||
const hours = Math.floor(ageMs / (1000 * 60 * 60));
|
||||
const minutes = Math.floor(ageMs / (1000 * 60));
|
||||
|
||||
if (days > 0) {
|
||||
return `${days} day${days === 1 ? '' : 's'} ago`;
|
||||
} else if (hours > 0) {
|
||||
return `${hours} hour${hours === 1 ? '' : 's'} ago`;
|
||||
} else if (minutes > 0) {
|
||||
return `${minutes} minute${minutes === 1 ? '' : 's'} ago`;
|
||||
}
|
||||
return 'just now';
|
||||
}
|
||||
|
||||
async function listModels(
|
||||
provider: string,
|
||||
useCache: boolean = true
|
||||
): Promise<void> {
|
||||
const config = getConfig();
|
||||
const apiKey = config.OCO_API_KEY;
|
||||
const currentModel = config.OCO_MODEL;
|
||||
|
||||
// Get cached models or fetch new ones
|
||||
let models: string[] = [];
|
||||
|
||||
if (useCache) {
|
||||
const cached = getCachedModels(provider);
|
||||
if (cached) {
|
||||
models = cached;
|
||||
}
|
||||
}
|
||||
|
||||
if (models.length === 0) {
|
||||
// Fallback to hardcoded list
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
models = MODEL_LIST[providerKey] || [];
|
||||
}
|
||||
|
||||
console.log(
|
||||
`\n${chalk.bold('Available models for')} ${chalk.cyan(provider)}:\n`
|
||||
);
|
||||
|
||||
if (models.length === 0) {
|
||||
console.log(chalk.dim(' No models found'));
|
||||
} else {
|
||||
models.forEach((model) => {
|
||||
const isCurrent = model === currentModel;
|
||||
const prefix = isCurrent ? chalk.green('* ') : ' ';
|
||||
const label = isCurrent ? chalk.green(model) : model;
|
||||
console.log(`${prefix}${label}`);
|
||||
});
|
||||
}
|
||||
|
||||
console.log('');
|
||||
}
|
||||
|
||||
async function refreshModels(provider: string): Promise<void> {
|
||||
const config = getConfig();
|
||||
const apiKey = config.OCO_API_KEY;
|
||||
|
||||
const loadingSpinner = spinner();
|
||||
loadingSpinner.start(`Fetching models from ${provider}...`);
|
||||
|
||||
// Clear cache first
|
||||
clearModelCache();
|
||||
|
||||
try {
|
||||
const models = await fetchModelsForProvider(
|
||||
provider,
|
||||
apiKey,
|
||||
undefined,
|
||||
true
|
||||
);
|
||||
loadingSpinner.stop(`${chalk.green('+')} Fetched ${models.length} models`);
|
||||
|
||||
// List the models
|
||||
await listModels(provider, true);
|
||||
} catch (error) {
|
||||
loadingSpinner.stop(chalk.red('Failed to fetch models'));
|
||||
console.error(
|
||||
chalk.red(
|
||||
`Error: ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export const modelsCommand = command(
|
||||
{
|
||||
name: COMMANDS.models,
|
||||
help: {
|
||||
description: 'List and manage cached models for your AI provider'
|
||||
},
|
||||
flags: {
|
||||
refresh: {
|
||||
type: Boolean,
|
||||
alias: 'r',
|
||||
description: 'Clear cache and re-fetch models from the provider',
|
||||
default: false
|
||||
},
|
||||
provider: {
|
||||
type: String,
|
||||
alias: 'p',
|
||||
description: 'Specify provider (defaults to current OCO_AI_PROVIDER)'
|
||||
}
|
||||
}
|
||||
},
|
||||
async ({ flags }) => {
|
||||
const config = getConfig();
|
||||
const provider =
|
||||
flags.provider || config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
|
||||
|
||||
intro(chalk.bgCyan(' OpenCommit Models '));
|
||||
|
||||
// Show cache info
|
||||
const cacheInfo = getCacheInfo();
|
||||
if (cacheInfo.timestamp) {
|
||||
console.log(
|
||||
chalk.dim(
|
||||
` Cache last updated: ${formatCacheAge(cacheInfo.timestamp)}`
|
||||
)
|
||||
);
|
||||
if (cacheInfo.providers.length > 0) {
|
||||
console.log(
|
||||
chalk.dim(` Cached providers: ${cacheInfo.providers.join(', ')}`)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
console.log(chalk.dim(' No cached models'));
|
||||
}
|
||||
|
||||
if (flags.refresh) {
|
||||
await refreshModels(provider);
|
||||
} else {
|
||||
await listModels(provider);
|
||||
}
|
||||
|
||||
outro(`Run ${chalk.cyan('oco models --refresh')} to update the model list`);
|
||||
}
|
||||
);
|
||||
@@ -56,10 +56,14 @@ export const prepareCommitMessageHook = async (
|
||||
|
||||
const fileContent = await fs.readFile(messageFilePath);
|
||||
|
||||
await fs.writeFile(
|
||||
messageFilePath,
|
||||
commitMessage + '\n' + fileContent.toString()
|
||||
);
|
||||
const messageWithComment = `# ${commitMessage}\n\n# ---------- [OpenCommit] ---------- #\n# Remove the # above to use this generated commit message.\n# To cancel the commit, just close this window without making any changes.\n\n${fileContent.toString()}`;
|
||||
const messageWithoutComment = `${commitMessage}\n\n${fileContent.toString()}`;
|
||||
|
||||
const message = config.OCO_HOOK_AUTO_UNCOMMENT
|
||||
? messageWithoutComment
|
||||
: messageWithComment;
|
||||
|
||||
await fs.writeFile(messageFilePath, message);
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${error}`);
|
||||
process.exit(1);
|
||||
|
||||
490
src/commands/setup.ts
Normal file
@@ -0,0 +1,490 @@
|
||||
import { intro, outro, select, text, isCancel, spinner } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
MODEL_LIST,
|
||||
OCO_AI_PROVIDER_ENUM,
|
||||
getConfig,
|
||||
setGlobalConfig,
|
||||
getGlobalConfig,
|
||||
getIsGlobalConfigFileExist,
|
||||
DEFAULT_CONFIG,
|
||||
PROVIDER_API_KEY_URLS,
|
||||
RECOMMENDED_MODELS
|
||||
} from './config';
|
||||
import {
|
||||
fetchModelsForProvider,
|
||||
fetchOllamaModels,
|
||||
getCacheInfo
|
||||
} from '../utils/modelCache';
|
||||
|
||||
const PROVIDER_DISPLAY_NAMES: Record<string, string> = {
|
||||
[OCO_AI_PROVIDER_ENUM.OPENAI]: 'OpenAI (GPT-4o, GPT-4)',
|
||||
[OCO_AI_PROVIDER_ENUM.ANTHROPIC]: 'Anthropic (Claude Sonnet, Opus)',
|
||||
[OCO_AI_PROVIDER_ENUM.OLLAMA]: 'Ollama (Free, runs locally)',
|
||||
[OCO_AI_PROVIDER_ENUM.GEMINI]: 'Google Gemini',
|
||||
[OCO_AI_PROVIDER_ENUM.GROQ]: 'Groq (Fast inference, free tier)',
|
||||
[OCO_AI_PROVIDER_ENUM.MISTRAL]: 'Mistral AI',
|
||||
[OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'DeepSeek',
|
||||
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'OpenRouter (Multiple providers)',
|
||||
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'AI/ML API',
|
||||
[OCO_AI_PROVIDER_ENUM.AZURE]: 'Azure OpenAI',
|
||||
[OCO_AI_PROVIDER_ENUM.MLX]: 'MLX (Apple Silicon, local)'
|
||||
};
|
||||
|
||||
const PRIMARY_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.OPENAI,
|
||||
OCO_AI_PROVIDER_ENUM.ANTHROPIC,
|
||||
OCO_AI_PROVIDER_ENUM.OLLAMA
|
||||
];
|
||||
|
||||
const OTHER_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.GEMINI,
|
||||
OCO_AI_PROVIDER_ENUM.GROQ,
|
||||
OCO_AI_PROVIDER_ENUM.MISTRAL,
|
||||
OCO_AI_PROVIDER_ENUM.DEEPSEEK,
|
||||
OCO_AI_PROVIDER_ENUM.OPENROUTER,
|
||||
OCO_AI_PROVIDER_ENUM.AIMLAPI,
|
||||
OCO_AI_PROVIDER_ENUM.AZURE,
|
||||
OCO_AI_PROVIDER_ENUM.MLX
|
||||
];
|
||||
|
||||
const NO_API_KEY_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.OLLAMA,
|
||||
OCO_AI_PROVIDER_ENUM.MLX,
|
||||
OCO_AI_PROVIDER_ENUM.TEST
|
||||
];
|
||||
|
||||
const MODEL_REQUIRED_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.OLLAMA,
|
||||
OCO_AI_PROVIDER_ENUM.MLX
|
||||
];
|
||||
|
||||
async function selectProvider(): Promise<string | symbol> {
|
||||
const primaryOptions = PRIMARY_PROVIDERS.map((provider) => ({
|
||||
value: provider,
|
||||
label: PROVIDER_DISPLAY_NAMES[provider] || provider
|
||||
}));
|
||||
|
||||
primaryOptions.push({
|
||||
value: 'other',
|
||||
label: 'Other providers...'
|
||||
});
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select your AI provider:',
|
||||
options: primaryOptions
|
||||
});
|
||||
|
||||
if (isCancel(selection)) return selection;
|
||||
|
||||
if (selection === 'other') {
|
||||
const otherOptions = OTHER_PROVIDERS.map((provider) => ({
|
||||
value: provider,
|
||||
label: PROVIDER_DISPLAY_NAMES[provider] || provider
|
||||
}));
|
||||
|
||||
return await select({
|
||||
message: 'Select provider:',
|
||||
options: otherOptions
|
||||
});
|
||||
}
|
||||
|
||||
return selection;
|
||||
}
|
||||
|
||||
async function getApiKey(provider: string): Promise<string | symbol> {
|
||||
const url =
|
||||
PROVIDER_API_KEY_URLS[provider as keyof typeof PROVIDER_API_KEY_URLS];
|
||||
|
||||
let message = `Enter your ${provider} API key:`;
|
||||
if (url) {
|
||||
message = `Enter your API key:\n${chalk.dim(` Get your key at: ${url}`)}`;
|
||||
}
|
||||
|
||||
return await text({
|
||||
message,
|
||||
placeholder: 'sk-...',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'API key is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function formatCacheAge(timestamp: number | null): string {
|
||||
if (!timestamp) return '';
|
||||
const ageMs = Date.now() - timestamp;
|
||||
const days = Math.floor(ageMs / (1000 * 60 * 60 * 24));
|
||||
const hours = Math.floor(ageMs / (1000 * 60 * 60));
|
||||
|
||||
if (days > 0) {
|
||||
return `${days} day${days === 1 ? '' : 's'} ago`;
|
||||
} else if (hours > 0) {
|
||||
return `${hours} hour${hours === 1 ? '' : 's'} ago`;
|
||||
}
|
||||
return 'just now';
|
||||
}
|
||||
|
||||
async function selectModel(
|
||||
provider: string,
|
||||
apiKey?: string
|
||||
): Promise<string | symbol> {
|
||||
const providerDisplayName =
|
||||
PROVIDER_DISPLAY_NAMES[provider]?.split(' (')[0] || provider;
|
||||
const loadingSpinner = spinner();
|
||||
loadingSpinner.start(`Fetching models from ${providerDisplayName}...`);
|
||||
|
||||
let models: string[] = [];
|
||||
let usedFallback = false;
|
||||
|
||||
try {
|
||||
models = await fetchModelsForProvider(provider, apiKey);
|
||||
} catch {
|
||||
// Fall back to hardcoded list
|
||||
usedFallback = true;
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
models = MODEL_LIST[providerKey] || [];
|
||||
}
|
||||
|
||||
// Check cache info for display
|
||||
const cacheInfo = getCacheInfo();
|
||||
const cacheAge = formatCacheAge(cacheInfo.timestamp);
|
||||
|
||||
if (usedFallback) {
|
||||
loadingSpinner.stop(
|
||||
chalk.yellow('Could not fetch models from API. Using default list.')
|
||||
);
|
||||
} else if (cacheAge) {
|
||||
loadingSpinner.stop(`Models loaded ${chalk.dim(`(cached ${cacheAge})`)}`);
|
||||
} else {
|
||||
loadingSpinner.stop('Models loaded');
|
||||
}
|
||||
|
||||
if (models.length === 0) {
|
||||
// For Ollama/MLX, prompt for manual entry
|
||||
if (NO_API_KEY_PROVIDERS.includes(provider as OCO_AI_PROVIDER_ENUM)) {
|
||||
return await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Use default from config
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
return MODEL_LIST[providerKey]?.[0] || 'gpt-4o-mini';
|
||||
}
|
||||
|
||||
// Get recommended model for this provider
|
||||
const recommended =
|
||||
RECOMMENDED_MODELS[provider as keyof typeof RECOMMENDED_MODELS];
|
||||
|
||||
// Build options with recommended first
|
||||
const options: Array<{ value: string; label: string }> = [];
|
||||
|
||||
if (recommended && models.includes(recommended)) {
|
||||
options.push({
|
||||
value: recommended,
|
||||
label: `${recommended} (Recommended)`
|
||||
});
|
||||
}
|
||||
|
||||
// Add other models (first 10, excluding recommended)
|
||||
const otherModels = models.filter((m) => m !== recommended).slice(0, 10);
|
||||
|
||||
otherModels.forEach((model) => {
|
||||
options.push({ value: model, label: model });
|
||||
});
|
||||
|
||||
// Add option to see all or enter custom
|
||||
if (models.length > 11) {
|
||||
options.push({ value: '__show_all__', label: 'Show all models...' });
|
||||
}
|
||||
options.push({ value: '__custom__', label: 'Enter custom model...' });
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select a model:',
|
||||
options
|
||||
});
|
||||
|
||||
if (isCancel(selection)) return selection;
|
||||
|
||||
if (selection === '__show_all__') {
|
||||
const allOptions = models.map((model) => ({
|
||||
value: model,
|
||||
label: model === recommended ? `${model} (Recommended)` : model
|
||||
}));
|
||||
|
||||
return await select({
|
||||
message: 'Select a model:',
|
||||
options: allOptions
|
||||
});
|
||||
}
|
||||
|
||||
if (selection === '__custom__') {
|
||||
return await text({
|
||||
message: 'Enter model name:',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return selection;
|
||||
}
|
||||
|
||||
async function setupOllama(): Promise<{
|
||||
provider: string;
|
||||
model: string;
|
||||
apiUrl: string;
|
||||
} | null> {
|
||||
console.log(chalk.cyan('\n Ollama - Free Local AI\n'));
|
||||
console.log(chalk.dim(' Setup steps:'));
|
||||
console.log(chalk.dim(' 1. Install: https://ollama.ai/download'));
|
||||
console.log(chalk.dim(' 2. Pull a model: ollama pull llama3:8b'));
|
||||
console.log(chalk.dim(' 3. Start server: ollama serve\n'));
|
||||
|
||||
// Try to fetch available models
|
||||
const loadingSpinner = spinner();
|
||||
loadingSpinner.start('Checking for local Ollama installation...');
|
||||
|
||||
const defaultUrl = 'http://localhost:11434';
|
||||
let ollamaModels: string[] = [];
|
||||
|
||||
try {
|
||||
ollamaModels = await fetchOllamaModels(defaultUrl);
|
||||
if (ollamaModels.length > 0) {
|
||||
loadingSpinner.stop(
|
||||
`${chalk.green('✔')} Found ${ollamaModels.length} local model(s)`
|
||||
);
|
||||
} else {
|
||||
loadingSpinner.stop(
|
||||
chalk.yellow(
|
||||
'Ollama is running but no models found. Pull a model first: ollama pull llama3:8b'
|
||||
)
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
loadingSpinner.stop(
|
||||
chalk.yellow(
|
||||
'Could not connect to Ollama. Make sure it is running: ollama serve'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Model selection
|
||||
let model: string | symbol;
|
||||
if (ollamaModels.length > 0) {
|
||||
model = await select({
|
||||
message: 'Select a model:',
|
||||
options: [
|
||||
...ollamaModels.map((m) => ({ value: m, label: m })),
|
||||
{ value: '__custom__', label: 'Enter custom model name...' }
|
||||
]
|
||||
});
|
||||
|
||||
if (isCancel(model)) return null;
|
||||
|
||||
if (model === '__custom__') {
|
||||
model = await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b'
|
||||
});
|
||||
}
|
||||
} else {
|
||||
model = await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (isCancel(model)) return null;
|
||||
|
||||
// API URL (optional)
|
||||
const apiUrl = await text({
|
||||
message: 'Ollama URL (press Enter for default):',
|
||||
placeholder: defaultUrl,
|
||||
defaultValue: defaultUrl
|
||||
});
|
||||
|
||||
if (isCancel(apiUrl)) return null;
|
||||
|
||||
return {
|
||||
provider: OCO_AI_PROVIDER_ENUM.OLLAMA,
|
||||
model: model as string,
|
||||
apiUrl: (apiUrl as string) || defaultUrl
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Runs the full interactive first-time setup wizard.
 *
 * Flow: pick a provider, then branch — Ollama and MLX get local-provider
 * flows (placeholder API keys), everything else gets the standard
 * key-then-model flow. The result is merged over the existing global config
 * (or DEFAULT_CONFIG when none exists) and persisted.
 *
 * @returns true when configuration was saved; false when the user cancelled
 *          at any prompt
 */
export async function runSetup(): Promise<boolean> {
  intro(chalk.bgCyan(' Welcome to OpenCommit! '));

  // Select provider
  const provider = await selectProvider();
  if (isCancel(provider)) {
    outro('Setup cancelled');
    return false;
  }

  let config: Partial<Record<string, any>> = {};

  // Handle Ollama specially
  if (provider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
    const ollamaConfig = await setupOllama();
    if (!ollamaConfig) {
      outro('Setup cancelled');
      return false;
    }

    config = {
      OCO_AI_PROVIDER: ollamaConfig.provider,
      OCO_MODEL: ollamaConfig.model,
      OCO_API_URL: ollamaConfig.apiUrl,
      OCO_API_KEY: 'ollama' // Placeholder
    };
  } else if (provider === OCO_AI_PROVIDER_ENUM.MLX) {
    // MLX setup
    console.log(chalk.cyan('\n MLX - Apple Silicon Local AI\n'));
    console.log(chalk.dim(' MLX runs locally on Apple Silicon Macs.'));
    console.log(chalk.dim(' No API key required.\n'));

    const model = await text({
      message: 'Enter model name:',
      placeholder: 'mlx-community/Llama-3-8B-Instruct-4bit'
    });

    if (isCancel(model)) {
      outro('Setup cancelled');
      return false;
    }

    config = {
      OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.MLX,
      OCO_MODEL: model,
      OCO_API_KEY: 'mlx' // Placeholder
    };
  } else {
    // Standard provider flow: API key then model
    const apiKey = await getApiKey(provider as string);
    if (isCancel(apiKey)) {
      outro('Setup cancelled');
      return false;
    }

    const model = await selectModel(provider as string, apiKey as string);
    if (isCancel(model)) {
      outro('Setup cancelled');
      return false;
    }

    config = {
      OCO_AI_PROVIDER: provider,
      OCO_API_KEY: apiKey,
      OCO_MODEL: model
    };
  }

  // Save configuration — start from the existing global config when one
  // exists so unrelated settings survive re-running setup.
  const existingConfig = getIsGlobalConfigFileExist()
    ? getGlobalConfig()
    : DEFAULT_CONFIG;

  const newConfig = {
    ...existingConfig,
    ...config
  };

  setGlobalConfig(newConfig as any);

  outro(
    `${chalk.green(
      '✔'
    )} Configuration saved to ~/.opencommit\n\n Run ${chalk.cyan(
      'oco'
    )} to generate commit messages!`
  );

  return true;
}
|
||||
|
||||
export function isFirstRun(): boolean {
|
||||
const hasGlobalConfig = getIsGlobalConfigFileExist();
|
||||
const config = getConfig();
|
||||
|
||||
const provider = config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
|
||||
|
||||
if (provider === OCO_AI_PROVIDER_ENUM.TEST) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const hasRequiredConfig = MODEL_REQUIRED_PROVIDERS.includes(
|
||||
provider as OCO_AI_PROVIDER_ENUM
|
||||
)
|
||||
? Boolean(config.OCO_MODEL)
|
||||
: Boolean(config.OCO_API_KEY);
|
||||
|
||||
// Trigger the full setup wizard only when nothing usable was configured yet.
|
||||
return !hasGlobalConfig && !hasRequiredConfig;
|
||||
}
|
||||
|
||||
export async function promptForMissingApiKey(): Promise<boolean> {
|
||||
const config = getConfig();
|
||||
const provider = config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
|
||||
|
||||
if (NO_API_KEY_PROVIDERS.includes(provider as OCO_AI_PROVIDER_ENUM)) {
|
||||
return true; // No API key needed
|
||||
}
|
||||
|
||||
if (config.OCO_API_KEY) {
|
||||
return true; // Already has key
|
||||
}
|
||||
|
||||
console.log(
|
||||
chalk.yellow(`\nAPI key missing for ${provider}. Let's set it up.\n`)
|
||||
);
|
||||
|
||||
const apiKey = await getApiKey(provider);
|
||||
if (isCancel(apiKey)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_API_KEY: apiKey as string
|
||||
} as any);
|
||||
|
||||
console.log(chalk.green('✔') + ' API key saved\n');
|
||||
return true;
|
||||
}
|
||||
|
||||
// CLI registration for the `setup` command: runs the interactive wizard.
// The boolean result of runSetup (saved vs. cancelled) is intentionally
// ignored here; the wizard reports its own outcome to the user.
export const setupCommand = command(
  {
    name: COMMANDS.setup,
    help: {
      description: 'Interactive setup wizard for OpenCommit'
    }
  },
  async () => {
    await runSetup();
  }
);
|
||||
@@ -11,7 +11,9 @@ export interface AiEngineConfig {
|
||||
maxTokensOutput: number;
|
||||
maxTokensInput: number;
|
||||
baseURL?: string;
|
||||
proxy?: string | null;
|
||||
customHeaders?: Record<string, string>;
|
||||
ollamaThink?: boolean;
|
||||
}
|
||||
|
||||
type Client =
|
||||
|
||||
39
src/engine/aimlapi.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import OpenAI from 'openai';
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface AimlApiConfig extends AiEngineConfig {}
|
||||
|
||||
// Engine for aimlapi.com, an OpenAI-compatible chat-completions API.
// Uses a raw axios client rather than the OpenAI SDK; the OpenAI import is
// only needed for its message-param types.
export class AimlApiEngine implements AiEngine {
  client: AxiosInstance;

  constructor(public config: AimlApiConfig) {
    this.client = axios.create({
      // NOTE(review): the default baseURL is the FULL endpoint path
      // (/v1/chat/completions), and generateCommitMessage posts to '' —
      // i.e. to the baseURL itself. A custom config.baseURL must therefore
      // also be the complete endpoint URL, not just the host — confirm this
      // is the intended contract.
      baseURL: config.baseURL || 'https://api.aimlapi.com/v1/chat/completions',
      headers: {
        Authorization: `Bearer ${config.apiKey}`,
        'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
        'X-Title': 'opencommit',
        'Content-Type': 'application/json',
        ...config.customHeaders
      }
    });
  }

  /**
   * Sends the chat messages to the configured endpoint and returns the
   * first choice's message content, or null when the response contains
   * none. Transport/API failures are rethrown via normalizeEngineError.
   */
  public generateCommitMessage = async (
    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
  ): Promise<string | null> => {
    try {
      // Empty path: the request goes to the baseURL itself (see note above).
      const response = await this.client.post('', {
        model: this.config.model,
        messages
      });

      const message = response.data.choices?.[0]?.message;
      return message?.content ?? null;
    } catch (error) {
      throw normalizeEngineError(error, 'aimlapi', this.config.model);
    }
  };
}
|
||||
@@ -1,13 +1,12 @@
|
||||
import AnthropicClient from '@anthropic-ai/sdk';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
import {
|
||||
MessageCreateParamsNonStreaming,
|
||||
MessageParam
|
||||
} from '@anthropic-ai/sdk/resources/messages.mjs';
|
||||
import { outro } from '@clack/prompts';
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -20,7 +19,14 @@ export class AnthropicEngine implements AiEngine {
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.client = new AnthropicClient({ apiKey: this.config.apiKey });
|
||||
const clientOptions: any = { apiKey: this.config.apiKey };
|
||||
|
||||
const proxy = config.proxy;
|
||||
if (proxy) {
|
||||
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
|
||||
}
|
||||
|
||||
this.client = new AnthropicClient(clientOptions);
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
@@ -37,9 +43,14 @@ export class AnthropicEngine implements AiEngine {
|
||||
system: systemMessage,
|
||||
messages: restMessages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
};
|
||||
|
||||
// add top_p for non-4.5 models
|
||||
if (!/claude.*-4-5/.test(params.model)) {
|
||||
params.top_p = 0.1;
|
||||
}
|
||||
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
@@ -58,22 +69,7 @@ export class AnthropicEngine implements AiEngine {
|
||||
let content = message;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const anthropicAiError = error.response.data.error;
|
||||
|
||||
if (anthropicAiError?.message) outro(anthropicAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'anthropic', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,11 +2,9 @@ import {
|
||||
AzureKeyCredential,
|
||||
OpenAIClient as AzureOpenAIClient
|
||||
} from '@azure/openai';
|
||||
import { outro } from '@clack/prompts';
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -53,28 +51,11 @@ export class AzureEngine implements AiEngine {
|
||||
if (message?.content === null) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${this.config.model}`);
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${JSON.stringify(error)}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError?.message) outro(openAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'azure', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { OpenAiEngine, OpenAiConfig } from './openAi';
|
||||
@@ -10,9 +10,10 @@ export interface DeepseekConfig extends OpenAiConfig {}
|
||||
export class DeepseekEngine extends OpenAiEngine {
|
||||
constructor(config: DeepseekConfig) {
|
||||
// Call OpenAIEngine constructor with forced Deepseek baseURL
|
||||
// Put baseURL first so user config can override it
|
||||
super({
|
||||
...config,
|
||||
baseURL: 'https://api.deepseek.com/v1'
|
||||
baseURL: 'https://api.deepseek.com/v1',
|
||||
...config
|
||||
});
|
||||
}
|
||||
|
||||
@@ -45,17 +46,7 @@ export class DeepseekEngine extends OpenAiEngine {
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError) throw new Error(openAiError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'deepseek', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
@@ -39,9 +40,8 @@ export class FlowiseEngine implements AiEngine {
|
||||
const message = response.data;
|
||||
let content = message?.text;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error('local model issues. details: ' + message);
|
||||
} catch (error) {
|
||||
throw normalizeEngineError(error, 'flowise', this.config.model);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,17 +1,72 @@
|
||||
import {
|
||||
Content,
|
||||
FinishReason,
|
||||
GenerateContentResponse,
|
||||
GoogleGenerativeAI,
|
||||
HarmBlockThreshold,
|
||||
HarmCategory,
|
||||
Part
|
||||
} from '@google/generative-ai';
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface GeminiConfig extends AiEngineConfig {}
|
||||
|
||||
const GEMINI_BLOCKING_FINISH_REASONS = new Set<FinishReason>([
|
||||
FinishReason.RECITATION,
|
||||
FinishReason.SAFETY,
|
||||
FinishReason.LANGUAGE
|
||||
]);
|
||||
|
||||
const formatGeminiBlockMessage = (
|
||||
response: GenerateContentResponse
|
||||
): string => {
|
||||
const promptFeedback = response.promptFeedback;
|
||||
if (promptFeedback?.blockReason) {
|
||||
return promptFeedback.blockReasonMessage
|
||||
? `Gemini response was blocked due to ${promptFeedback.blockReason}: ${promptFeedback.blockReasonMessage}`
|
||||
: `Gemini response was blocked due to ${promptFeedback.blockReason}`;
|
||||
}
|
||||
|
||||
const firstCandidate = response.candidates?.[0];
|
||||
if (firstCandidate?.finishReason) {
|
||||
return firstCandidate.finishMessage
|
||||
? `Gemini response was blocked due to ${firstCandidate.finishReason}: ${firstCandidate.finishMessage}`
|
||||
: `Gemini response was blocked due to ${firstCandidate.finishReason}`;
|
||||
}
|
||||
|
||||
return 'Gemini response did not contain usable text';
|
||||
};
|
||||
|
||||
const extractGeminiText = (response: GenerateContentResponse): string => {
|
||||
const firstCandidate = response.candidates?.[0];
|
||||
|
||||
if (
|
||||
firstCandidate?.finishReason &&
|
||||
GEMINI_BLOCKING_FINISH_REASONS.has(firstCandidate.finishReason)
|
||||
) {
|
||||
throw new Error(formatGeminiBlockMessage(response));
|
||||
}
|
||||
|
||||
const text = firstCandidate?.content?.parts
|
||||
?.flatMap((part) =>
|
||||
'text' in part && typeof part.text === 'string' ? [part.text] : []
|
||||
)
|
||||
.join('');
|
||||
|
||||
if (typeof text === 'string' && text.length > 0) {
|
||||
return text;
|
||||
}
|
||||
|
||||
if (response.promptFeedback?.blockReason) {
|
||||
throw new Error(formatGeminiBlockMessage(response));
|
||||
}
|
||||
|
||||
return '';
|
||||
};
|
||||
|
||||
export class GeminiEngine implements AiEngine {
|
||||
config: GeminiConfig;
|
||||
client: GoogleGenerativeAI;
|
||||
@@ -29,10 +84,15 @@ export class GeminiEngine implements AiEngine {
|
||||
.map((m) => m.content)
|
||||
.join('\n');
|
||||
|
||||
const gemini = this.client.getGenerativeModel({
|
||||
model: this.config.model,
|
||||
systemInstruction
|
||||
});
|
||||
const gemini = this.client.getGenerativeModel(
|
||||
{
|
||||
model: this.config.model,
|
||||
systemInstruction
|
||||
},
|
||||
{
|
||||
baseUrl: this.config.baseURL
|
||||
}
|
||||
);
|
||||
|
||||
const contents = messages
|
||||
.filter((m) => m.role !== 'system')
|
||||
@@ -72,19 +132,10 @@ export class GeminiEngine implements AiEngine {
|
||||
}
|
||||
});
|
||||
|
||||
const content = result.response.text();
|
||||
const content = extractGeminiText(result.response);
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const geminiError = error.response.data.error;
|
||||
if (geminiError) throw new Error(geminiError?.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'gemini', this.config.model);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,4 +7,4 @@ export class GroqEngine extends OpenAiEngine {
|
||||
config.baseURL = 'https://api.groq.com/openai/v1';
|
||||
super(config);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import axios from 'axios';
|
||||
import { Mistral } from '@mistralai/mistralai';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -9,10 +10,6 @@ import { AiEngine, AiEngineConfig } from './Engine';
|
||||
export interface MistralAiConfig extends AiEngineConfig {}
|
||||
export type MistralCompletionMessageParam = Array<any>;
|
||||
|
||||
// Import Mistral dynamically to avoid TS errors
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const Mistral = require('@mistralai/mistralai').Mistral;
|
||||
|
||||
export class MistralAiEngine implements AiEngine {
|
||||
config: MistralAiConfig;
|
||||
client: any; // Using any type for Mistral client to avoid TS errors
|
||||
@@ -23,7 +20,10 @@ export class MistralAiEngine implements AiEngine {
|
||||
if (!config.baseURL) {
|
||||
this.client = new Mistral({ apiKey: config.apiKey });
|
||||
} else {
|
||||
this.client = new Mistral({ apiKey: config.apiKey, serverURL: config.baseURL });
|
||||
this.client = new Mistral({
|
||||
apiKey: config.apiKey,
|
||||
serverURL: config.baseURL
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,28 +50,17 @@ export class MistralAiEngine implements AiEngine {
|
||||
|
||||
const completion = await this.client.chat.complete(params);
|
||||
|
||||
if (!completion.choices)
|
||||
throw Error('No completion choice available.')
|
||||
|
||||
if (!completion.choices) throw Error('No completion choice available.');
|
||||
|
||||
const message = completion.choices[0].message;
|
||||
|
||||
if (!message || !message.content)
|
||||
throw Error('No completion choice available.')
|
||||
throw Error('No completion choice available.');
|
||||
|
||||
let content = message.content as string;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const mistralError = error.response.data.error;
|
||||
|
||||
if (mistralError) throw new Error(mistralError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'mistral', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,47 +1,49 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface MLXConfig extends AiEngineConfig {}
|
||||
|
||||
const DEFAULT_MLX_URL = 'http://localhost:8080';
|
||||
const MLX_CHAT_PATH = '/v1/chat/completions';
|
||||
|
||||
export class MLXEngine implements AiEngine {
|
||||
config: MLXConfig;
|
||||
client: AxiosInstance;
|
||||
config: MLXConfig;
|
||||
client: AxiosInstance;
|
||||
private chatUrl: string;
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
this.client = axios.create({
|
||||
url: config.baseURL
|
||||
? `${config.baseURL}/${config.apiKey}`
|
||||
: 'http://localhost:8080/v1/chat/completions',
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
|
||||
const baseUrl = config.baseURL || DEFAULT_MLX_URL;
|
||||
this.chatUrl = `${baseUrl}${MLX_CHAT_PATH}`;
|
||||
|
||||
this.client = axios.create({
|
||||
headers: { 'Content-Type': 'application/json' }
|
||||
});
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
const params = {
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
repetition_penalty: 1.5,
|
||||
stream: false
|
||||
};
|
||||
try {
|
||||
const response = await this.client.post(this.chatUrl, params);
|
||||
|
||||
const choices = response.data.choices;
|
||||
const message = choices[0].message;
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
throw normalizeEngineError(error, 'mlx', this.config.model);
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>):
|
||||
Promise<string | undefined> {
|
||||
const params = {
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
repetition_penalty: 1.5,
|
||||
stream: false
|
||||
};
|
||||
try {
|
||||
const response = await this.client.post(
|
||||
this.client.getUri(this.config),
|
||||
params
|
||||
);
|
||||
|
||||
const choices = response.data.choices;
|
||||
const message = choices[0].message;
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error(`MLX provider error: ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,52 +1,56 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OllamaConfig extends AiEngineConfig {}
|
||||
interface OllamaConfig extends AiEngineConfig {
|
||||
ollamaThink?: boolean;
|
||||
}
|
||||
|
||||
const DEFAULT_OLLAMA_URL = 'http://localhost:11434';
|
||||
const OLLAMA_CHAT_PATH = '/api/chat';
|
||||
|
||||
export class OllamaEngine implements AiEngine {
|
||||
config: OllamaConfig;
|
||||
client: AxiosInstance;
|
||||
private chatUrl: string;
|
||||
|
||||
constructor(config) {
|
||||
this.config = config;
|
||||
|
||||
|
||||
const baseUrl = config.baseURL || DEFAULT_OLLAMA_URL;
|
||||
this.chatUrl = `${baseUrl}${OLLAMA_CHAT_PATH}`;
|
||||
|
||||
// Combine base headers with custom headers
|
||||
const headers = {
|
||||
const headers = {
|
||||
'Content-Type': 'application/json',
|
||||
...config.customHeaders
|
||||
...config.customHeaders
|
||||
};
|
||||
|
||||
this.client = axios.create({
|
||||
url: config.baseURL
|
||||
? `${config.baseURL}/${config.apiKey}`
|
||||
: 'http://localhost:11434/api/chat',
|
||||
headers
|
||||
});
|
||||
|
||||
this.client = axios.create({ headers });
|
||||
}
|
||||
|
||||
async generateCommitMessage(
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | undefined> {
|
||||
const params = {
|
||||
const params: Record<string, any> = {
|
||||
model: this.config.model ?? 'mistral',
|
||||
messages,
|
||||
options: { temperature: 0, top_p: 0.1 },
|
||||
stream: false
|
||||
};
|
||||
if (typeof this.config.ollamaThink === 'boolean') {
|
||||
params.think = this.config.ollamaThink;
|
||||
}
|
||||
try {
|
||||
const response = await this.client.post(
|
||||
this.client.getUri(this.config),
|
||||
params
|
||||
);
|
||||
const response = await this.client.post(this.chatUrl, params);
|
||||
|
||||
const { message } = response.data;
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error(`Ollama provider error: ${message}`);
|
||||
} catch (error) {
|
||||
throw normalizeEngineError(error, 'ollama', this.config.model);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { parseCustomHeaders } from '../utils/engine';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
import { parseCustomHeaders } from '../utils/customHeaders';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -18,30 +19,41 @@ export class OpenAiEngine implements AiEngine {
|
||||
const clientOptions: OpenAI.ClientOptions = {
|
||||
apiKey: config.apiKey
|
||||
};
|
||||
|
||||
|
||||
if (config.baseURL) {
|
||||
clientOptions.baseURL = config.baseURL;
|
||||
}
|
||||
|
||||
|
||||
const proxy = config.proxy;
|
||||
if (proxy) {
|
||||
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
|
||||
}
|
||||
|
||||
if (config.customHeaders) {
|
||||
const headers = parseCustomHeaders(config.customHeaders);
|
||||
if (Object.keys(headers).length > 0) {
|
||||
clientOptions.defaultHeaders = headers;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
this.client = new OpenAI(clientOptions);
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
|
||||
): Promise<string | null> => {
|
||||
const isReasoningModel = /^(o[1-9]|gpt-5)/.test(this.config.model);
|
||||
|
||||
const params = {
|
||||
model: this.config.model,
|
||||
messages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
...(isReasoningModel
|
||||
? { max_completion_tokens: this.config.maxTokensOutput }
|
||||
: {
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: this.config.maxTokensOutput
|
||||
})
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -54,24 +66,16 @@ export class OpenAiEngine implements AiEngine {
|
||||
this.config.maxTokensInput - this.config.maxTokensOutput
|
||||
)
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
|
||||
const completion = await this.client.chat.completions.create(params);
|
||||
|
||||
const completion = await this.client.chat.completions.create(
|
||||
params as OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming
|
||||
);
|
||||
|
||||
const message = completion.choices[0].message;
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openAiError = error.response.data.error;
|
||||
|
||||
if (openAiError) throw new Error(openAiError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'openai', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import OpenAI from 'openai';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { normalizeEngineError } from '../utils/engineErrorHandler';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
interface OpenRouterConfig extends AiEngineConfig {}
|
||||
|
||||
@@ -33,17 +34,7 @@ export class OpenRouterEngine implements AiEngine {
|
||||
let content = message?.content;
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const openRouterError = error.response.data.error;
|
||||
|
||||
if (openRouterError) throw new Error(openRouterError.message);
|
||||
}
|
||||
|
||||
throw err;
|
||||
throw normalizeEngineError(error, 'openrouter', this.config.model);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,7 +1,22 @@
|
||||
import { select, confirm, isCancel } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { DEFAULT_TOKEN_LIMITS, getConfig } from './commands/config';
|
||||
import {
|
||||
DEFAULT_TOKEN_LIMITS,
|
||||
getConfig,
|
||||
setGlobalConfig,
|
||||
getGlobalConfig,
|
||||
MODEL_LIST,
|
||||
RECOMMENDED_MODELS
|
||||
} from './commands/config';
|
||||
import { getMainCommitPrompt } from './prompts';
|
||||
import { getEngine } from './utils/engine';
|
||||
import {
|
||||
isModelNotFoundError,
|
||||
getSuggestedModels,
|
||||
ModelNotFoundError
|
||||
} from './utils/errors';
|
||||
import { GenerateCommitMessageErrorEnum } from './utils/generateCommitMessageErrors';
|
||||
import { mergeDiffs } from './utils/mergeDiffs';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
@@ -14,7 +29,10 @@ const generateCommitMessageChatCompletionPrompt = async (
|
||||
fullGitMojiSpec: boolean,
|
||||
context: string
|
||||
): Promise<Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>> => {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec, context);
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
|
||||
fullGitMojiSpec,
|
||||
context
|
||||
);
|
||||
|
||||
const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
|
||||
|
||||
@@ -26,11 +44,90 @@ const generateCommitMessageChatCompletionPrompt = async (
|
||||
return chatContextAsCompletionRequest;
|
||||
};
|
||||
|
||||
export enum GenerateCommitMessageErrorEnum {
|
||||
tooMuchTokens = 'TOO_MUCH_TOKENS',
|
||||
internalError = 'INTERNAL_ERROR',
|
||||
emptyMessage = 'EMPTY_MESSAGE',
|
||||
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
|
||||
async function handleModelNotFoundError(
|
||||
error: Error,
|
||||
provider: string,
|
||||
currentModel: string
|
||||
): Promise<string | null> {
|
||||
console.log(chalk.red(`\n✖ Model '${currentModel}' not found\n`));
|
||||
|
||||
const suggestedModels = getSuggestedModels(provider, currentModel);
|
||||
const recommended =
|
||||
RECOMMENDED_MODELS[provider as keyof typeof RECOMMENDED_MODELS];
|
||||
|
||||
if (suggestedModels.length === 0) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
`No alternative models available. Run 'oco setup' to configure a different model.`
|
||||
)
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const options: Array<{ value: string; label: string }> = [];
|
||||
|
||||
// Add recommended first if available
|
||||
if (recommended && suggestedModels.includes(recommended)) {
|
||||
options.push({
|
||||
value: recommended,
|
||||
label: `${recommended} (Recommended)`
|
||||
});
|
||||
}
|
||||
|
||||
// Add other suggestions
|
||||
suggestedModels
|
||||
.filter((m) => m !== recommended)
|
||||
.forEach((model) => {
|
||||
options.push({ value: model, label: model });
|
||||
});
|
||||
|
||||
options.push({ value: '__custom__', label: 'Enter custom model...' });
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select an alternative model:',
|
||||
options
|
||||
});
|
||||
|
||||
if (isCancel(selection)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let newModel: string;
|
||||
if (selection === '__custom__') {
|
||||
const { text } = await import('@clack/prompts');
|
||||
const customModel = await text({
|
||||
message: 'Enter model name:',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
|
||||
if (isCancel(customModel)) {
|
||||
return null;
|
||||
}
|
||||
newModel = customModel as string;
|
||||
} else {
|
||||
newModel = selection as string;
|
||||
}
|
||||
|
||||
// Ask if user wants to save as default
|
||||
const saveAsDefault = await confirm({
|
||||
message: 'Save as default model?'
|
||||
});
|
||||
|
||||
if (!isCancel(saveAsDefault) && saveAsDefault) {
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_MODEL: newModel
|
||||
} as any);
|
||||
console.log(chalk.green('√') + ' Model saved as default\n');
|
||||
}
|
||||
|
||||
return newModel;
|
||||
}
|
||||
|
||||
const ADJUSTMENT_FACTOR = 20;
|
||||
@@ -38,8 +135,13 @@ const ADJUSTMENT_FACTOR = 20;
|
||||
export const generateCommitMessageByDiff = async (
|
||||
diff: string,
|
||||
fullGitMojiSpec: boolean = false,
|
||||
context: string = ""
|
||||
context: string = '',
|
||||
retryWithModel?: string
|
||||
): Promise<string> => {
|
||||
const currentConfig = getConfig();
|
||||
const provider = currentConfig.OCO_AI_PROVIDER || 'openai';
|
||||
const currentModel = retryWithModel || currentConfig.OCO_MODEL;
|
||||
|
||||
try {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
|
||||
fullGitMojiSpec,
|
||||
@@ -60,7 +162,8 @@ export const generateCommitMessageByDiff = async (
|
||||
const commitMessagePromises = await getCommitMsgsPromisesFromFileDiffs(
|
||||
diff,
|
||||
MAX_REQUEST_TOKENS,
|
||||
fullGitMojiSpec
|
||||
fullGitMojiSpec,
|
||||
context
|
||||
);
|
||||
|
||||
const commitMessages = [] as string[];
|
||||
@@ -75,7 +178,7 @@ export const generateCommitMessageByDiff = async (
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
diff,
|
||||
fullGitMojiSpec,
|
||||
context,
|
||||
context
|
||||
);
|
||||
|
||||
const engine = getEngine();
|
||||
@@ -86,6 +189,32 @@ export const generateCommitMessageByDiff = async (
|
||||
|
||||
return commitMessage;
|
||||
} catch (error) {
|
||||
// Handle model-not-found errors with interactive recovery
|
||||
if (isModelNotFoundError(error)) {
|
||||
const newModel = await handleModelNotFoundError(
|
||||
error as Error,
|
||||
provider,
|
||||
currentModel
|
||||
);
|
||||
|
||||
if (newModel) {
|
||||
console.log(chalk.cyan(`Retrying with ${newModel}...\n`));
|
||||
// Retry with the new model by updating config temporarily
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_MODEL: newModel
|
||||
} as any);
|
||||
|
||||
return generateCommitMessageByDiff(
|
||||
diff,
|
||||
fullGitMojiSpec,
|
||||
context,
|
||||
newModel
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
@@ -94,7 +223,8 @@ function getMessagesPromisesByChangesInFile(
|
||||
fileDiff: string,
|
||||
separator: string,
|
||||
maxChangeLength: number,
|
||||
fullGitMojiSpec: boolean
|
||||
fullGitMojiSpec: boolean,
|
||||
context: string
|
||||
) {
|
||||
const hunkHeaderSeparator = '@@ ';
|
||||
const [fileHeader, ...fileDiffByLines] = fileDiff.split(hunkHeaderSeparator);
|
||||
@@ -122,7 +252,8 @@ function getMessagesPromisesByChangesInFile(
|
||||
async (lineDiff) => {
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
separator + lineDiff,
|
||||
fullGitMojiSpec
|
||||
fullGitMojiSpec,
|
||||
context
|
||||
);
|
||||
|
||||
return engine.generateCommitMessage(messages);
|
||||
@@ -171,7 +302,8 @@ function splitDiff(diff: string, maxChangeLength: number) {
|
||||
export const getCommitMsgsPromisesFromFileDiffs = async (
|
||||
diff: string,
|
||||
maxDiffLength: number,
|
||||
fullGitMojiSpec: boolean
|
||||
fullGitMojiSpec: boolean,
|
||||
context: string
|
||||
) => {
|
||||
const separator = 'diff --git ';
|
||||
|
||||
@@ -189,14 +321,16 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
|
||||
fileDiff,
|
||||
separator,
|
||||
maxDiffLength,
|
||||
fullGitMojiSpec
|
||||
fullGitMojiSpec,
|
||||
context
|
||||
);
|
||||
|
||||
commitMessagePromises.push(...messagesPromises);
|
||||
} else {
|
||||
const messages = await generateCommitMessageChatCompletionPrompt(
|
||||
separator + fileDiff,
|
||||
fullGitMojiSpec
|
||||
fullGitMojiSpec,
|
||||
context
|
||||
);
|
||||
|
||||
const engine = getEngine();
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"localLanguage": "한국어",
|
||||
"commitFix": "fix(server.ts): 포트 변수를 소문자 port에서 대문자 PORT로 변경",
|
||||
"commitFeat": "피트(server.ts): process.env.PORT 환경 변수 지원 추가",
|
||||
"commitFeat": "feat(server.ts): process.env.PORT 환경 변수 지원 추가",
|
||||
"commitDescription": "포트 변수는 이제 PORT로 이름이 지정되어 상수인 PORT와 일관성 있는 이름 규칙을 따릅니다. 환경 변수 지원을 통해 애플리케이션은 이제 process.env.PORT 환경 변수로 지정된 사용 가능한 모든 포트에서 실행할 수 있으므로 더 유연해졌습니다.",
|
||||
"commitFixOmitScope": "fix: 포트 변수를 소문자 port에서 대문자 PORT로 변경",
|
||||
"commitFeatOmitScope": "피트: process.env.PORT 환경 변수 지원 추가"
|
||||
"commitFeatOmitScope": "feat: process.env.PORT 환경 변수 지원 추가"
|
||||
}
|
||||
|
||||
@@ -36,6 +36,19 @@ export const runMigrations = async () => {
|
||||
const config = getConfig();
|
||||
if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;
|
||||
|
||||
// skip unhandled providers in migration00
|
||||
if (
|
||||
[
|
||||
OCO_AI_PROVIDER_ENUM.DEEPSEEK,
|
||||
OCO_AI_PROVIDER_ENUM.GROQ,
|
||||
OCO_AI_PROVIDER_ENUM.MISTRAL,
|
||||
OCO_AI_PROVIDER_ENUM.MLX,
|
||||
OCO_AI_PROVIDER_ENUM.OPENROUTER
|
||||
].includes(config.OCO_AI_PROVIDER)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const completedMigrations = getCompletedMigrations();
|
||||
|
||||
let isMigrated = false;
|
||||
|
||||
@@ -56,10 +56,11 @@ const llmReadableRules: {
|
||||
blankline: (key, applicable) =>
|
||||
`There should ${applicable} be a blank line at the beginning of the ${key}.`,
|
||||
caseRule: (key, applicable, value: string | Array<string>) =>
|
||||
`The ${key} should ${applicable} be in ${Array.isArray(value)
|
||||
? `one of the following case:
|
||||
`The ${key} should ${applicable} be in ${
|
||||
Array.isArray(value)
|
||||
? `one of the following case:
|
||||
- ${value.join('\n - ')}.`
|
||||
: `${value} case.`
|
||||
: `${value} case.`
|
||||
}`,
|
||||
emptyRule: (key, applicable) => `The ${key} should ${applicable} be empty.`,
|
||||
enumRule: (key, applicable, value: string | Array<string>) =>
|
||||
@@ -67,17 +68,18 @@ const llmReadableRules: {
|
||||
- ${Array.isArray(value) ? value.join('\n - ') : value}.`,
|
||||
enumTypeRule: (key, applicable, value: string | Array<string>, prompt) =>
|
||||
`The ${key} should ${applicable} be one of the following values:
|
||||
- ${Array.isArray(value)
|
||||
- ${
|
||||
Array.isArray(value)
|
||||
? value
|
||||
.map((v) => {
|
||||
const description = getTypeRuleExtraDescription(v, prompt);
|
||||
if (description) {
|
||||
return `${v} (${description})`;
|
||||
} else return v;
|
||||
})
|
||||
.join('\n - ')
|
||||
.map((v) => {
|
||||
const description = getTypeRuleExtraDescription(v, prompt);
|
||||
if (description) {
|
||||
return `${v} (${description})`;
|
||||
} else return v;
|
||||
})
|
||||
.join('\n - ')
|
||||
: value
|
||||
}.`,
|
||||
}.`,
|
||||
fullStopRule: (key, applicable, value: string) =>
|
||||
`The ${key} should ${applicable} end with '${value}'.`,
|
||||
maxLengthRule: (key, applicable, value: string) =>
|
||||
@@ -214,16 +216,20 @@ const STRUCTURE_OF_COMMIT = config.OCO_OMIT_SCOPE
|
||||
const GEN_COMMITLINT_CONSISTENCY_PROMPT = (
|
||||
prompts: string[]
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam[] => [
|
||||
{
|
||||
role: 'system',
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
|
||||
{
|
||||
role: 'system',
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages for two different changes in a single codebase and output them in the provided JSON format: one for a bug fix and another for a new feature.
|
||||
|
||||
Here are the specific requirements and conventions that should be strictly followed:
|
||||
|
||||
Commit Message Conventions:
|
||||
- The commit message consists of three parts: Header, Body, and Footer.
|
||||
- Header:
|
||||
- Format: ${config.OCO_OMIT_SCOPE ? '`<type>: <subject>`' : '`<type>(<scope>): <subject>`'}
|
||||
- Format: ${
|
||||
config.OCO_OMIT_SCOPE
|
||||
? '`<type>: <subject>`'
|
||||
: '`<type>(<scope>): <subject>`'
|
||||
}
|
||||
- ${prompts.join('\n- ')}
|
||||
|
||||
JSON Output Format:
|
||||
@@ -246,9 +252,9 @@ Additional Details:
|
||||
- Allowing the server to listen on a port specified through the environment variable is considered a new feature.
|
||||
|
||||
Example Git Diff is to follow:`
|
||||
},
|
||||
INIT_DIFF_PROMPT
|
||||
];
|
||||
},
|
||||
INIT_DIFF_PROMPT
|
||||
];
|
||||
|
||||
/**
|
||||
* Prompt to have LLM generate a message using @commitlint rules.
|
||||
@@ -262,25 +268,30 @@ const INIT_MAIN_PROMPT = (
|
||||
prompts: string[]
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
|
||||
role: 'system',
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${config.OCO_WHY ? 'and WHY the changes were done' : ''
|
||||
}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${config.OCO_EMOJI
|
||||
? 'Use GitMoji convention to preface the commit.'
|
||||
: 'Do not preface the commit with anything.'
|
||||
}
|
||||
${config.OCO_DESCRIPTION
|
||||
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
|
||||
: "Don't add any descriptions to the commit, only commit message."
|
||||
}
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${
|
||||
config.OCO_WHY ? 'and WHY the changes were done' : ''
|
||||
}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${
|
||||
config.OCO_EMOJI
|
||||
? 'Use GitMoji convention to preface the commit.'
|
||||
: 'Do not preface the commit with anything.'
|
||||
}
|
||||
${
|
||||
config.OCO_DESCRIPTION
|
||||
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
|
||||
: "Don't add any descriptions to the commit, only commit message."
|
||||
}
|
||||
Use the present tense. Use ${language} to answer.
|
||||
${config.OCO_ONE_LINE_COMMIT
|
||||
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
|
||||
: ''
|
||||
}
|
||||
${config.OCO_OMIT_SCOPE
|
||||
? 'Do not include a scope in the commit message format. Use the format: <type>: <subject>'
|
||||
: ''
|
||||
}
|
||||
${
|
||||
config.OCO_ONE_LINE_COMMIT
|
||||
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
|
||||
: ''
|
||||
}
|
||||
${
|
||||
config.OCO_OMIT_SCOPE
|
||||
? 'Do not include a scope in the commit message format. Use the format: <type>: <subject>'
|
||||
: ''
|
||||
}
|
||||
You will strictly follow the following conventions to generate the content of the commit message:
|
||||
- ${prompts.join('\n- ')}
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ export const getJSONBlock = (input: string): string => {
|
||||
if (jsonIndex > -1) {
|
||||
input = input.slice(jsonIndex + 8);
|
||||
const endJsonIndex = input.search('```');
|
||||
input = input.slice(0, endJsonIndex);
|
||||
input = input.slice(0, endJsonIndex);
|
||||
}
|
||||
return input;
|
||||
};
|
||||
|
||||
@@ -95,10 +95,10 @@ const CONVENTIONAL_COMMIT_KEYWORDS =
|
||||
'Do not preface the commit with anything, except for the conventional commit keywords: fix, feat, build, chore, ci, docs, style, refactor, perf, test.';
|
||||
|
||||
const getCommitConvention = (fullGitMojiSpec: boolean) =>
|
||||
config.OCO_EMOJI
|
||||
? fullGitMojiSpec
|
||||
? FULL_GITMOJI_SPEC
|
||||
: GITMOJI_HELP
|
||||
fullGitMojiSpec
|
||||
? FULL_GITMOJI_SPEC
|
||||
: config.OCO_EMOJI
|
||||
? GITMOJI_HELP
|
||||
: CONVENTIONAL_COMMIT_KEYWORDS;
|
||||
|
||||
const getDescriptionInstruction = () =>
|
||||
@@ -123,41 +123,41 @@ const getScopeInstruction = () =>
|
||||
* $ oco -- This is a context used to generate the commit message
|
||||
* @returns - The context of the user input
|
||||
*/
|
||||
const userInputCodeContext = (context: string) => {
|
||||
if (context !== '' && context !== ' ') {
|
||||
return `Additional context provided by the user: <context>${context}</context>\nConsider this context when generating the commit message, incorporating relevant information when appropriate.`;
|
||||
const userInputCodeContext = (context: string | undefined | null) => {
|
||||
const trimmed = (context ?? '').trim();
|
||||
if (trimmed === '') {
|
||||
return '';
|
||||
}
|
||||
return '';
|
||||
return `Additional context provided by the user: <context>${trimmed}</context>\nConsider this context when generating the commit message, incorporating relevant information when appropriate.`;
|
||||
};
|
||||
|
||||
const INIT_MAIN_PROMPT = (
|
||||
language: string,
|
||||
fullGitMojiSpec: boolean,
|
||||
context: string
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
|
||||
role: 'system',
|
||||
content: (() => {
|
||||
const commitConvention = fullGitMojiSpec
|
||||
? 'GitMoji specification'
|
||||
: 'Conventional Commit Convention';
|
||||
const missionStatement = `${IDENTITY} Your mission is to create clean and comprehensive commit messages as per the ${commitConvention} and explain WHAT were the changes and mainly WHY the changes were done.`;
|
||||
const diffInstruction =
|
||||
"I'll send you an output of 'git diff --staged' command, and you are to convert it into a commit message.";
|
||||
const conventionGuidelines = getCommitConvention(fullGitMojiSpec);
|
||||
const descriptionGuideline = getDescriptionInstruction();
|
||||
const oneLineCommitGuideline = getOneLineCommitInstruction();
|
||||
const scopeInstruction = getScopeInstruction();
|
||||
const generalGuidelines = `Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`;
|
||||
const userInputContext = userInputCodeContext(context);
|
||||
): OpenAI.Chat.Completions.ChatCompletionMessageParam => {
|
||||
const commitConvention = fullGitMojiSpec
|
||||
? 'GitMoji specification'
|
||||
: 'Conventional Commit Convention';
|
||||
const missionStatement = `${IDENTITY} Your mission is to create clean and comprehensive commit messages as per the ${commitConvention} and explain WHAT were the changes and mainly WHY the changes were done.`;
|
||||
const diffInstruction =
|
||||
"I'll send you an output of 'git diff --staged' command, and you are to convert it into a commit message.";
|
||||
const conventionGuidelines = getCommitConvention(fullGitMojiSpec);
|
||||
const descriptionGuideline = getDescriptionInstruction();
|
||||
const oneLineCommitGuideline = getOneLineCommitInstruction();
|
||||
const scopeInstruction = getScopeInstruction();
|
||||
const generalGuidelines = `Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`;
|
||||
const userInputContext = userInputCodeContext(context);
|
||||
|
||||
return `${missionStatement}\n${diffInstruction}\n${conventionGuidelines}\n${descriptionGuideline}\n${oneLineCommitGuideline}\n${scopeInstruction}\n${generalGuidelines}\n${userInputContext}`;
|
||||
})()
|
||||
});
|
||||
const content = `${missionStatement}\n${diffInstruction}\n${conventionGuidelines}\n${descriptionGuideline}\n${oneLineCommitGuideline}\n${scopeInstruction}\n${generalGuidelines}\n${userInputContext}`;
|
||||
|
||||
return { role: 'system', content };
|
||||
};
|
||||
|
||||
export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessageParam =
|
||||
{
|
||||
role: 'user',
|
||||
content: `diff --git a/src/server.ts b/src/server.ts
|
||||
{
|
||||
role: 'user',
|
||||
content: `diff --git a/src/server.ts b/src/server.ts
|
||||
index ad4db42..f3b18a9 100644
|
||||
--- a/src/server.ts
|
||||
+++ b/src/server.ts
|
||||
@@ -181,7 +181,7 @@ export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessagePara
|
||||
+app.listen(process.env.PORT || PORT, () => {
|
||||
+ console.log(\`Server listening on port \${PORT}\`);
|
||||
});`
|
||||
};
|
||||
};
|
||||
|
||||
const COMMIT_TYPES = {
|
||||
fix: '🐛',
|
||||
@@ -193,19 +193,19 @@ const generateCommitString = (
|
||||
message: string
|
||||
): string => {
|
||||
const cleanMessage = removeConventionalCommitWord(message);
|
||||
return config.OCO_EMOJI
|
||||
? `${COMMIT_TYPES[type]} ${cleanMessage}`
|
||||
: message;
|
||||
return config.OCO_EMOJI ? `${COMMIT_TYPES[type]} ${cleanMessage}` : message;
|
||||
};
|
||||
|
||||
const getConsistencyContent = (translation: ConsistencyPrompt) => {
|
||||
const fixMessage = config.OCO_OMIT_SCOPE && translation.commitFixOmitScope
|
||||
? translation.commitFixOmitScope
|
||||
: translation.commitFix;
|
||||
const fixMessage =
|
||||
config.OCO_OMIT_SCOPE && translation.commitFixOmitScope
|
||||
? translation.commitFixOmitScope
|
||||
: translation.commitFix;
|
||||
|
||||
const featMessage = config.OCO_OMIT_SCOPE && translation.commitFeatOmitScope
|
||||
? translation.commitFeatOmitScope
|
||||
: translation.commitFeat;
|
||||
const featMessage =
|
||||
config.OCO_OMIT_SCOPE && translation.commitFeatOmitScope
|
||||
? translation.commitFeatOmitScope
|
||||
: translation.commitFeat;
|
||||
|
||||
const fix = generateCommitString('fix', fixMessage);
|
||||
const feat = config.OCO_ONE_LINE_COMMIT
|
||||
@@ -250,7 +250,7 @@ export const getMainCommitPrompt = async (
|
||||
INIT_DIFF_PROMPT,
|
||||
INIT_CONSISTENCY_PROMPT(
|
||||
commitLintConfig.consistency[
|
||||
translation.localLanguage
|
||||
translation.localLanguage
|
||||
] as ConsistencyPrompt
|
||||
)
|
||||
];
|
||||
|
||||
@@ -6,6 +6,10 @@ import currentPackage from '../../package.json';
|
||||
import { getOpenCommitLatestVersion } from '../version';
|
||||
|
||||
export const checkIsLatestVersion = async () => {
|
||||
if (process.env.OCO_TEST_SKIP_VERSION_CHECK === 'true') {
|
||||
return;
|
||||
}
|
||||
|
||||
const latestVersion = await getOpenCommitLatestVersion();
|
||||
|
||||
if (latestVersion) {
|
||||
|
||||
21
src/utils/customHeaders.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
export function parseCustomHeaders(headers: any): Record<string, string> {
|
||||
let parsedHeaders = {};
|
||||
|
||||
if (!headers) {
|
||||
return parsedHeaders;
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof headers === 'object' && !Array.isArray(headers)) {
|
||||
parsedHeaders = headers;
|
||||
} else {
|
||||
parsedHeaders = JSON.parse(headers);
|
||||
}
|
||||
} catch {
|
||||
console.warn(
|
||||
'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
|
||||
);
|
||||
}
|
||||
|
||||
return parsedHeaders;
|
||||
}
|
||||
@@ -11,48 +11,34 @@ import { TestAi, TestMockType } from '../engine/testAi';
|
||||
import { GroqEngine } from '../engine/groq';
|
||||
import { MLXEngine } from '../engine/mlx';
|
||||
import { DeepseekEngine } from '../engine/deepseek';
|
||||
import { AimlApiEngine } from '../engine/aimlapi';
|
||||
import { OpenRouterEngine } from '../engine/openrouter';
|
||||
|
||||
export function parseCustomHeaders(headers: any): Record<string, string> {
|
||||
let parsedHeaders = {};
|
||||
|
||||
if (!headers) {
|
||||
return parsedHeaders;
|
||||
}
|
||||
|
||||
try {
|
||||
if (typeof headers === 'object' && !Array.isArray(headers)) {
|
||||
parsedHeaders = headers;
|
||||
} else {
|
||||
parsedHeaders = JSON.parse(headers);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
|
||||
);
|
||||
}
|
||||
|
||||
return parsedHeaders;
|
||||
}
|
||||
import { parseCustomHeaders } from './customHeaders';
|
||||
import { resolveProxy } from './proxy';
|
||||
|
||||
export function getEngine(): AiEngine {
|
||||
const config = getConfig();
|
||||
const provider = config.OCO_AI_PROVIDER;
|
||||
|
||||
const customHeaders = parseCustomHeaders(config.OCO_API_CUSTOM_HEADERS);
|
||||
const resolvedProxy = resolveProxy(config.OCO_PROXY);
|
||||
|
||||
const DEFAULT_CONFIG = {
|
||||
model: config.OCO_MODEL!,
|
||||
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
|
||||
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
|
||||
baseURL: config.OCO_API_URL!,
|
||||
proxy: resolvedProxy,
|
||||
apiKey: config.OCO_API_KEY!,
|
||||
customHeaders
|
||||
};
|
||||
|
||||
switch (provider) {
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
return new OllamaEngine(DEFAULT_CONFIG);
|
||||
return new OllamaEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
ollamaThink: config.OCO_OLLAMA_THINK
|
||||
});
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return new AnthropicEngine(DEFAULT_CONFIG);
|
||||
@@ -81,6 +67,9 @@ export function getEngine(): AiEngine {
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
return new DeepseekEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
|
||||
return new AimlApiEngine(DEFAULT_CONFIG);
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
|
||||
return new OpenRouterEngine(DEFAULT_CONFIG);
|
||||
|
||||
|
||||
205
src/utils/engineErrorHandler.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
import axios from 'axios';
|
||||
import {
|
||||
AuthenticationError,
|
||||
InsufficientCreditsError,
|
||||
ModelNotFoundError,
|
||||
RateLimitError,
|
||||
ServiceUnavailableError
|
||||
} from './errors';
|
||||
|
||||
/**
|
||||
* Extracts HTTP status code from various error types
|
||||
*/
|
||||
function getStatusCode(error: unknown): number | null {
|
||||
// Direct status property (common in API SDKs)
|
||||
if (typeof (error as any)?.status === 'number') {
|
||||
return (error as any).status;
|
||||
}
|
||||
|
||||
// Axios-style errors
|
||||
if (axios.isAxiosError(error)) {
|
||||
return error.response?.status ?? null;
|
||||
}
|
||||
|
||||
// Response object with status
|
||||
if (typeof (error as any)?.response?.status === 'number') {
|
||||
return (error as any).response.status;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts retry-after value from error headers (for rate limiting)
|
||||
*/
|
||||
function getRetryAfter(error: unknown): number | undefined {
|
||||
const headers = (error as any)?.response?.headers;
|
||||
if (headers) {
|
||||
const retryAfter = headers['retry-after'] || headers['Retry-After'];
|
||||
if (retryAfter) {
|
||||
const seconds = parseInt(retryAfter, 10);
|
||||
if (!isNaN(seconds)) {
|
||||
return seconds;
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the error message from various error structures
|
||||
*/
|
||||
function extractErrorMessage(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
// API error response structures
|
||||
const apiError = (error as any)?.response?.data?.error;
|
||||
if (apiError) {
|
||||
if (typeof apiError === 'string') {
|
||||
return apiError;
|
||||
}
|
||||
if (apiError.message) {
|
||||
return apiError.message;
|
||||
}
|
||||
}
|
||||
|
||||
// Direct error data
|
||||
const errorData = (error as any)?.error;
|
||||
if (errorData) {
|
||||
if (typeof errorData === 'string') {
|
||||
return errorData;
|
||||
}
|
||||
if (errorData.message) {
|
||||
return errorData.message;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback
|
||||
if (typeof error === 'string') {
|
||||
return error;
|
||||
}
|
||||
|
||||
return 'An unknown error occurred';
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the error message indicates a model not found error
|
||||
*/
|
||||
function isModelNotFoundMessage(message: string): boolean {
|
||||
const lowerMessage = message.toLowerCase();
|
||||
return (
|
||||
(lowerMessage.includes('model') &&
|
||||
(lowerMessage.includes('not found') ||
|
||||
lowerMessage.includes('does not exist') ||
|
||||
lowerMessage.includes('invalid') ||
|
||||
lowerMessage.includes('pull'))) ||
|
||||
lowerMessage.includes('does_not_exist')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if the error message indicates insufficient credits
|
||||
*/
|
||||
function isInsufficientCreditsMessage(message: string): boolean {
|
||||
const lowerMessage = message.toLowerCase();
|
||||
return (
|
||||
lowerMessage.includes('insufficient') ||
|
||||
lowerMessage.includes('credit') ||
|
||||
lowerMessage.includes('quota') ||
|
||||
lowerMessage.includes('balance too low') ||
|
||||
lowerMessage.includes('billing') ||
|
||||
lowerMessage.includes('payment required') ||
|
||||
lowerMessage.includes('exceeded')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes raw API errors into typed error classes.
|
||||
* This provides consistent error handling across all engine implementations.
|
||||
*
|
||||
* @param error - The raw error from the API call
|
||||
* @param provider - The AI provider name (e.g., 'openai', 'anthropic')
|
||||
* @param model - The model being used
|
||||
* @returns A typed Error instance
|
||||
*/
|
||||
export function normalizeEngineError(
|
||||
error: unknown,
|
||||
provider: string,
|
||||
model: string
|
||||
): Error {
|
||||
// If it's already one of our custom errors, return as-is
|
||||
if (
|
||||
error instanceof ModelNotFoundError ||
|
||||
error instanceof AuthenticationError ||
|
||||
error instanceof InsufficientCreditsError ||
|
||||
error instanceof RateLimitError ||
|
||||
error instanceof ServiceUnavailableError
|
||||
) {
|
||||
return error;
|
||||
}
|
||||
|
||||
const statusCode = getStatusCode(error);
|
||||
const message = extractErrorMessage(error);
|
||||
|
||||
// Handle based on HTTP status codes
|
||||
switch (statusCode) {
|
||||
case 401:
|
||||
return new AuthenticationError(provider, message);
|
||||
|
||||
case 402:
|
||||
return new InsufficientCreditsError(provider, message);
|
||||
|
||||
case 404:
|
||||
// Could be model not found or endpoint not found
|
||||
if (isModelNotFoundMessage(message)) {
|
||||
return new ModelNotFoundError(model, provider, 404);
|
||||
}
|
||||
// Return generic error for other 404s
|
||||
return error instanceof Error ? error : new Error(message);
|
||||
|
||||
case 429:
|
||||
const retryAfter = getRetryAfter(error);
|
||||
return new RateLimitError(provider, retryAfter, message);
|
||||
|
||||
case 500:
|
||||
case 502:
|
||||
case 503:
|
||||
case 504:
|
||||
return new ServiceUnavailableError(provider, statusCode, message);
|
||||
}
|
||||
|
||||
// Handle based on error message content
|
||||
if (isModelNotFoundMessage(message)) {
|
||||
return new ModelNotFoundError(model, provider, 404);
|
||||
}
|
||||
|
||||
if (isInsufficientCreditsMessage(message)) {
|
||||
return new InsufficientCreditsError(provider, message);
|
||||
}
|
||||
|
||||
// Check for rate limit patterns in message
|
||||
const lowerMessage = message.toLowerCase();
|
||||
if (
|
||||
lowerMessage.includes('rate limit') ||
|
||||
lowerMessage.includes('rate_limit') ||
|
||||
lowerMessage.includes('too many requests')
|
||||
) {
|
||||
return new RateLimitError(provider, undefined, message);
|
||||
}
|
||||
|
||||
// Check for auth patterns in message
|
||||
if (
|
||||
lowerMessage.includes('unauthorized') ||
|
||||
lowerMessage.includes('api key') ||
|
||||
lowerMessage.includes('apikey') ||
|
||||
lowerMessage.includes('authentication') ||
|
||||
lowerMessage.includes('invalid_api_key')
|
||||
) {
|
||||
return new AuthenticationError(provider, message);
|
||||
}
|
||||
|
||||
// Return original error or wrap in Error if needed
|
||||
return error instanceof Error ? error : new Error(message);
|
||||
}
|
||||
515
src/utils/errors.ts
Normal file
@@ -0,0 +1,515 @@
|
||||
import chalk from 'chalk';
|
||||
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
|
||||
// Provider billing/help URLs for common errors.
// A null entry means the provider has no billing page to point users at
// (local, self-hosted, or test providers).
export const PROVIDER_BILLING_URLS: Record<string, string | null> = {
  [OCO_AI_PROVIDER_ENUM.ANTHROPIC]:
    'https://console.anthropic.com/settings/billing',
  [OCO_AI_PROVIDER_ENUM.OPENAI]:
    'https://platform.openai.com/settings/organization/billing',
  [OCO_AI_PROVIDER_ENUM.GEMINI]: 'https://aistudio.google.com/app/plan',
  [OCO_AI_PROVIDER_ENUM.GROQ]: 'https://console.groq.com/settings/billing',
  [OCO_AI_PROVIDER_ENUM.MISTRAL]: 'https://console.mistral.ai/billing/',
  [OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'https://platform.deepseek.com/usage',
  [OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'https://openrouter.ai/credits',
  [OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'https://aimlapi.com/app/billing',
  [OCO_AI_PROVIDER_ENUM.AZURE]:
    'https://portal.azure.com/#view/Microsoft_Azure_CostManagement',
  // Local / self-hosted / test providers: no billing page.
  [OCO_AI_PROVIDER_ENUM.OLLAMA]: null,
  [OCO_AI_PROVIDER_ENUM.MLX]: null,
  [OCO_AI_PROVIDER_ENUM.FLOWISE]: null,
  [OCO_AI_PROVIDER_ENUM.TEST]: null
};
|
||||
|
||||
// Error type for insufficient credits/quota
|
||||
export class InsufficientCreditsError extends Error {
|
||||
public readonly provider: string;
|
||||
|
||||
constructor(provider: string, message?: string) {
|
||||
super(
|
||||
message || `Insufficient credits or quota for provider '${provider}'`
|
||||
);
|
||||
this.name = 'InsufficientCreditsError';
|
||||
this.provider = provider;
|
||||
}
|
||||
}
|
||||
|
||||
// Error type for rate limiting (429 errors)
|
||||
export class RateLimitError extends Error {
|
||||
public readonly provider: string;
|
||||
public readonly retryAfter?: number;
|
||||
|
||||
constructor(provider: string, retryAfter?: number, message?: string) {
|
||||
super(message || `Rate limit exceeded for provider '${provider}'`);
|
||||
this.name = 'RateLimitError';
|
||||
this.provider = provider;
|
||||
this.retryAfter = retryAfter;
|
||||
}
|
||||
}
|
||||
|
||||
// Error type for service unavailable (5xx errors)
|
||||
export class ServiceUnavailableError extends Error {
|
||||
public readonly provider: string;
|
||||
public readonly statusCode: number;
|
||||
|
||||
constructor(provider: string, statusCode: number = 503, message?: string) {
|
||||
super(message || `Service unavailable for provider '${provider}'`);
|
||||
this.name = 'ServiceUnavailableError';
|
||||
this.provider = provider;
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
}
|
||||
|
||||
// Error type for authentication failures
|
||||
export class AuthenticationError extends Error {
|
||||
public readonly provider: string;
|
||||
|
||||
constructor(provider: string, message?: string) {
|
||||
super(message || `Authentication failed for provider '${provider}'`);
|
||||
this.name = 'AuthenticationError';
|
||||
this.provider = provider;
|
||||
}
|
||||
}
|
||||
|
||||
export class ModelNotFoundError extends Error {
|
||||
public readonly modelName: string;
|
||||
public readonly provider: string;
|
||||
public readonly statusCode: number;
|
||||
|
||||
constructor(modelName: string, provider: string, statusCode: number = 404) {
|
||||
super(`Model '${modelName}' not found for provider '${provider}'`);
|
||||
this.name = 'ModelNotFoundError';
|
||||
this.modelName = modelName;
|
||||
this.provider = provider;
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
}
|
||||
|
||||
export class ApiKeyMissingError extends Error {
|
||||
public readonly provider: string;
|
||||
|
||||
constructor(provider: string) {
|
||||
super(`API key is missing for provider '${provider}'`);
|
||||
this.name = 'ApiKeyMissingError';
|
||||
this.provider = provider;
|
||||
}
|
||||
}
|
||||
|
||||
export function isModelNotFoundError(error: unknown): boolean {
|
||||
if (error instanceof ModelNotFoundError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// OpenAI error patterns
|
||||
if (
|
||||
message.includes('model') &&
|
||||
(message.includes('not found') ||
|
||||
message.includes('does not exist') ||
|
||||
message.includes('invalid model'))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Anthropic error patterns
|
||||
if (
|
||||
message.includes('model') &&
|
||||
(message.includes('not found') || message.includes('invalid'))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 404 status in axios/fetch errors
|
||||
if (
|
||||
'status' in (error as any) &&
|
||||
(error as any).status === 404 &&
|
||||
message.includes('model')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for response status
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 404) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function isApiKeyError(error: unknown): boolean {
|
||||
if (error instanceof ApiKeyMissingError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// Common API key error patterns
|
||||
if (
|
||||
message.includes('api key') ||
|
||||
message.includes('apikey') ||
|
||||
message.includes('authentication') ||
|
||||
message.includes('unauthorized') ||
|
||||
message.includes('invalid_api_key') ||
|
||||
message.includes('incorrect api key')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 401 status
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 401) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function getSuggestedModels(
|
||||
provider: string,
|
||||
failedModel: string
|
||||
): string[] {
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
const models = MODEL_LIST[providerKey];
|
||||
|
||||
if (!models || !Array.isArray(models)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Return first 5 models as suggestions, excluding the failed one
|
||||
return models.filter((m) => m !== failedModel).slice(0, 5);
|
||||
}
|
||||
|
||||
export function getRecommendedModel(provider: string): string | null {
|
||||
switch (provider.toLowerCase()) {
|
||||
case OCO_AI_PROVIDER_ENUM.OPENAI:
|
||||
return 'gpt-4o-mini';
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return 'claude-sonnet-4-20250514';
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
return 'gemini-1.5-flash';
|
||||
case OCO_AI_PROVIDER_ENUM.GROQ:
|
||||
return 'llama3-70b-8192';
|
||||
case OCO_AI_PROVIDER_ENUM.MISTRAL:
|
||||
return 'mistral-small-latest';
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
return 'deepseek-chat';
|
||||
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
|
||||
return 'openai/gpt-4o-mini';
|
||||
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
|
||||
return 'gpt-4o-mini';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatErrorWithRecovery(
|
||||
error: Error,
|
||||
provider: string,
|
||||
model: string
|
||||
): string {
|
||||
const suggestions = getSuggestedModels(provider, model);
|
||||
const recommended = getRecommendedModel(provider);
|
||||
|
||||
let message = `\n${error.message}\n`;
|
||||
|
||||
if (suggestions.length > 0) {
|
||||
message += '\nSuggested alternatives:\n';
|
||||
suggestions.forEach((m, i) => {
|
||||
const isRecommended = m === recommended;
|
||||
message += ` ${i + 1}. ${m}${isRecommended ? ' (Recommended)' : ''}\n`;
|
||||
});
|
||||
}
|
||||
|
||||
message += '\nTo fix this, run: oco config set OCO_MODEL=<model-name>\n';
|
||||
message += 'Or run: oco setup\n';
|
||||
|
||||
return message;
|
||||
}
|
||||
|
||||
// Detect insufficient credits/quota errors from various providers
|
||||
export function isInsufficientCreditsError(error: unknown): boolean {
|
||||
if (error instanceof InsufficientCreditsError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// Common patterns for insufficient credits/quota
|
||||
if (
|
||||
message.includes('insufficient') ||
|
||||
message.includes('credit') ||
|
||||
message.includes('quota') ||
|
||||
message.includes('balance') ||
|
||||
message.includes('billing') ||
|
||||
message.includes('payment') ||
|
||||
message.includes('exceeded') ||
|
||||
message.includes('limit reached') ||
|
||||
message.includes('no remaining')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 402 Payment Required status
|
||||
if ('status' in (error as any) && (error as any).status === 402) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 402) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// Detect rate limit errors (429)
|
||||
export function isRateLimitError(error: unknown): boolean {
|
||||
if (error instanceof RateLimitError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// Common patterns for rate limiting
|
||||
if (
|
||||
message.includes('rate limit') ||
|
||||
message.includes('rate_limit') ||
|
||||
message.includes('too many requests') ||
|
||||
message.includes('throttle')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 429 status
|
||||
if ('status' in (error as any) && (error as any).status === 429) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 429) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// Detect service unavailable errors (5xx)
|
||||
export function isServiceUnavailableError(error: unknown): boolean {
|
||||
if (error instanceof ServiceUnavailableError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// Common patterns for service unavailable
|
||||
if (
|
||||
message.includes('service unavailable') ||
|
||||
message.includes('server error') ||
|
||||
message.includes('internal error') ||
|
||||
message.includes('temporarily unavailable') ||
|
||||
message.includes('overloaded')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 5xx status
|
||||
const status = (error as any).status || (error as any).response?.status;
|
||||
if (status && status >= 500 && status < 600) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/** User-friendly formatted error structure, consumed by printFormattedError. */
export interface FormattedError {
  /** Short heading for the error category (e.g. 'Rate Limit Exceeded'). */
  title: string;
  /** Human-readable explanation of what went wrong. */
  message: string;
  /** Provider billing/help page link, or null when none applies. */
  helpUrl: string | null;
  /** Actionable next step for the user, or null when none applies. */
  suggestion: string | null;
}

/** Extra context that influences error formatting. */
export interface ErrorFormattingContext {
  /** Custom API endpoint base URL, if the user configured one. */
  baseURL?: string;
}
|
||||
|
||||
function getCustomEndpointLabel(baseURL?: string): string | null {
|
||||
if (!baseURL) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return new URL(baseURL).host;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function getServiceUnavailableMessage(
|
||||
provider: string,
|
||||
context?: ErrorFormattingContext
|
||||
): string {
|
||||
const endpointLabel = getCustomEndpointLabel(context?.baseURL);
|
||||
|
||||
if (endpointLabel) {
|
||||
return `The configured API endpoint (${endpointLabel}) is temporarily unavailable.`;
|
||||
}
|
||||
|
||||
if (context?.baseURL) {
|
||||
return 'The configured API endpoint is temporarily unavailable.';
|
||||
}
|
||||
|
||||
return `The ${provider} service is temporarily unavailable.`;
|
||||
}
|
||||
|
||||
/**
 * Format an error into a user-friendly structure.
 *
 * Detection order is significant: typed custom errors are matched first
 * (exact), then raw errors are classified heuristically in priority order
 * (credits → rate limit → service unavailable → auth → model not found);
 * anything unrecognized falls through to a generic message. Reordering the
 * heuristics would change classification, since their patterns overlap.
 */
export function formatUserFriendlyError(
  error: unknown,
  provider: string,
  context?: ErrorFormattingContext
): FormattedError {
  // Billing URL is null for providers with no billing page (local/test).
  const billingUrl = PROVIDER_BILLING_URLS[provider] || null;

  // Handle our custom error types first
  if (error instanceof InsufficientCreditsError) {
    return {
      title: 'Insufficient Credits',
      message: `Your ${provider} account has insufficient credits or quota.`,
      helpUrl: billingUrl,
      suggestion: 'Add credits to your account to continue using the service.'
    };
  }

  if (error instanceof RateLimitError) {
    // Prefer the precise wait time when the server sent Retry-After.
    const retryMsg = error.retryAfter
      ? `Please wait ${error.retryAfter} seconds before retrying.`
      : 'Please wait a moment before retrying.';
    return {
      title: 'Rate Limit Exceeded',
      message: `You've made too many requests to ${provider}.`,
      helpUrl: billingUrl,
      suggestion: retryMsg
    };
  }

  if (error instanceof ServiceUnavailableError) {
    return {
      title: 'Service Unavailable',
      message: getServiceUnavailableMessage(provider, context),
      helpUrl: null,
      suggestion: 'Please try again in a few moments.'
    };
  }

  if (error instanceof AuthenticationError) {
    return {
      title: 'Authentication Failed',
      message: `Your ${provider} API key is invalid or expired.`,
      helpUrl: billingUrl,
      suggestion: 'Run `oco setup` to configure a valid API key.'
    };
  }

  if (error instanceof ModelNotFoundError) {
    return {
      title: 'Model Not Found',
      message: `The model '${error.modelName}' is not available for ${provider}.`,
      helpUrl: null,
      suggestion: 'Run `oco setup` to select a valid model.'
    };
  }

  // Detect error type from raw errors (heuristic classification; the
  // branches below mirror the typed branches above).
  if (isInsufficientCreditsError(error)) {
    return {
      title: 'Insufficient Credits',
      message: `Your ${provider} account has insufficient credits or quota.`,
      helpUrl: billingUrl,
      suggestion: 'Add credits to your account to continue using the service.'
    };
  }

  if (isRateLimitError(error)) {
    return {
      title: 'Rate Limit Exceeded',
      message: `You've made too many requests to ${provider}.`,
      helpUrl: billingUrl,
      suggestion: 'Please wait a moment before retrying.'
    };
  }

  if (isServiceUnavailableError(error)) {
    return {
      title: 'Service Unavailable',
      message: getServiceUnavailableMessage(provider, context),
      helpUrl: null,
      suggestion: 'Please try again in a few moments.'
    };
  }

  if (isApiKeyError(error)) {
    return {
      title: 'Authentication Failed',
      message: `Your ${provider} API key is invalid or expired.`,
      helpUrl: billingUrl,
      suggestion: 'Run `oco setup` to configure a valid API key.'
    };
  }

  if (isModelNotFoundError(error)) {
    // Raw errors may carry the model under either property name.
    const model = (error as any).modelName || (error as any).model || 'unknown';
    return {
      title: 'Model Not Found',
      message: `The model '${model}' is not available for ${provider}.`,
      helpUrl: null,
      suggestion: 'Run `oco setup` to select a valid model.'
    };
  }

  // Default: generic error
  const errorMessage = error instanceof Error ? error.message : String(error);
  return {
    title: 'Error',
    message: errorMessage,
    helpUrl: null,
    suggestion: 'Run `oco setup` to reconfigure or check your settings.'
  };
}
|
||||
|
||||
// Print a formatted error as a chalk-styled string
|
||||
export function printFormattedError(formatted: FormattedError): string {
|
||||
let output = `\n${chalk.red('✖')} ${chalk.bold.red(formatted.title)}\n`;
|
||||
output += ` ${formatted.message}\n`;
|
||||
|
||||
if (formatted.helpUrl) {
|
||||
output += `\n ${chalk.cyan('Help:')} ${chalk.underline(
|
||||
formatted.helpUrl
|
||||
)}\n`;
|
||||
}
|
||||
|
||||
if (formatted.suggestion) {
|
||||
output += `\n ${chalk.yellow('Suggestion:')} ${formatted.suggestion}\n`;
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
8
src/utils/generateCommitMessageErrors.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { DEFAULT_TOKEN_LIMITS } from '../commands/config';
|
||||
|
||||
export enum GenerateCommitMessageErrorEnum {
|
||||
tooMuchTokens = 'TOO_MUCH_TOKENS',
|
||||
internalError = 'INTERNAL_ERROR',
|
||||
emptyMessage = 'EMPTY_MESSAGE',
|
||||
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
|
||||
}
|
||||
110
src/utils/git.ts
@@ -1,7 +1,7 @@
|
||||
import { execa } from 'execa';
|
||||
import { readFileSync } from 'fs';
|
||||
import ignore, { Ignore } from 'ignore';
|
||||
|
||||
import { join } from 'path';
|
||||
import { outro, spinner } from '@clack/prompts';
|
||||
|
||||
export const assertGitRepo = async () => {
|
||||
@@ -16,41 +16,44 @@ export const assertGitRepo = async () => {
|
||||
// (file) => `:(exclude)${file}`
|
||||
// );
|
||||
|
||||
export const getOpenCommitIgnore = (): Ignore => {
|
||||
export const getOpenCommitIgnore = async (): Promise<Ignore> => {
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const ig = ignore();
|
||||
|
||||
try {
|
||||
ig.add(readFileSync('.opencommitignore').toString().split('\n'));
|
||||
ig.add(
|
||||
readFileSync(join(gitDir, '.opencommitignore')).toString().split('\n')
|
||||
);
|
||||
} catch (e) {}
|
||||
|
||||
return ig;
|
||||
};
|
||||
|
||||
export const getCoreHooksPath = async (): Promise<string> => {
|
||||
const { stdout } = await execa('git', ['config', 'core.hooksPath']);
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const { stdout } = await execa('git', ['config', 'core.hooksPath'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
|
||||
return stdout;
|
||||
};
|
||||
|
||||
export const getStagedFiles = async (): Promise<string[]> => {
|
||||
const { stdout: gitDir } = await execa('git', [
|
||||
'rev-parse',
|
||||
'--show-toplevel'
|
||||
]);
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const { stdout: files } = await execa('git', [
|
||||
'diff',
|
||||
'--name-only',
|
||||
'--cached',
|
||||
'--relative',
|
||||
gitDir
|
||||
]);
|
||||
const { stdout: files } = await execa(
|
||||
'git',
|
||||
['diff', '--name-only', '--cached', '--relative'],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
|
||||
if (!files) return [];
|
||||
|
||||
const filesList = files.split('\n');
|
||||
|
||||
const ig = getOpenCommitIgnore();
|
||||
const ig = await getOpenCommitIgnore();
|
||||
const allowedFiles = filesList.filter((file) => !ig.ignores(file));
|
||||
|
||||
if (!allowedFiles) return [];
|
||||
@@ -59,12 +62,17 @@ export const getStagedFiles = async (): Promise<string[]> => {
|
||||
};
|
||||
|
||||
export const getChangedFiles = async (): Promise<string[]> => {
|
||||
const { stdout: modified } = await execa('git', ['ls-files', '--modified']);
|
||||
const { stdout: others } = await execa('git', [
|
||||
'ls-files',
|
||||
'--others',
|
||||
'--exclude-standard'
|
||||
]);
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const { stdout: modified } = await execa('git', ['ls-files', '--modified'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
|
||||
const { stdout: others } = await execa(
|
||||
'git',
|
||||
['ls-files', '--others', '--exclude-standard'],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
|
||||
const files = [...modified.split('\n'), ...others.split('\n')].filter(
|
||||
(file) => !!file
|
||||
@@ -74,46 +82,56 @@ export const getChangedFiles = async (): Promise<string[]> => {
|
||||
};
|
||||
|
||||
export const gitAdd = async ({ files }: { files: string[] }) => {
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const gitAddSpinner = spinner();
|
||||
|
||||
gitAddSpinner.start('Adding files to commit');
|
||||
|
||||
await execa('git', ['add', ...files]);
|
||||
await execa('git', ['add', ...files], { cwd: gitDir });
|
||||
|
||||
gitAddSpinner.stop(`Staged ${files.length} files`);
|
||||
};
|
||||
|
||||
export const getDiff = async ({ files }: { files: string[] }) => {
|
||||
const lockFiles = files.filter(
|
||||
(file) =>
|
||||
file.includes('.lock') ||
|
||||
file.includes('-lock.') ||
|
||||
file.includes('.svg') ||
|
||||
file.includes('.png') ||
|
||||
file.includes('.jpg') ||
|
||||
file.includes('.jpeg') ||
|
||||
file.includes('.webp') ||
|
||||
file.includes('.gif')
|
||||
);
|
||||
const isFileExcludedFromDiff = (file: string) =>
|
||||
file.includes('.lock') ||
|
||||
file.includes('-lock.') ||
|
||||
file.includes('.svg') ||
|
||||
file.includes('.png') ||
|
||||
file.includes('.jpg') ||
|
||||
file.includes('.jpeg') ||
|
||||
file.includes('.webp') ||
|
||||
file.includes('.gif');
|
||||
|
||||
if (lockFiles.length) {
|
||||
export const getDiff = async ({ files }: { files: string[] }) => {
|
||||
const gitDir = await getGitDir();
|
||||
|
||||
const excludedFiles = files.filter(isFileExcludedFromDiff);
|
||||
|
||||
if (excludedFiles.length) {
|
||||
outro(
|
||||
`Some files are excluded by default from 'git diff'. No commit messages are generated for this files:\n${lockFiles.join(
|
||||
`Some files are excluded by default from 'git diff'. No commit messages are generated for this files:\n${excludedFiles.join(
|
||||
'\n'
|
||||
)}`
|
||||
);
|
||||
}
|
||||
|
||||
const filesWithoutLocks = files.filter(
|
||||
(file) => !file.includes('.lock') && !file.includes('-lock.')
|
||||
);
|
||||
const diffableFiles = files.filter((file) => !isFileExcludedFromDiff(file));
|
||||
|
||||
const { stdout: diff } = await execa('git', [
|
||||
'diff',
|
||||
'--staged',
|
||||
'--',
|
||||
...filesWithoutLocks
|
||||
]);
|
||||
const { stdout: diff } = await execa(
|
||||
'git',
|
||||
['diff', '--staged', '--', ...diffableFiles],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
|
||||
return diff;
|
||||
};
|
||||
|
||||
export const getGitDir = async (): Promise<string> => {
|
||||
const { stdout: gitDir } = await execa('git', [
|
||||
'rev-parse',
|
||||
'--show-toplevel'
|
||||
]);
|
||||
|
||||
return gitDir;
|
||||
};
|
||||
|
||||
330
src/utils/modelCache.ts
Normal file
@@ -0,0 +1,330 @@
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
|
||||
// Location of the on-disk model-list cache shared across providers.
const MODEL_CACHE_PATH = pathJoin(homedir(), '.opencommit-models.json');
// Cached model lists are considered fresh for this long.
const CACHE_TTL_MS = 7 * 24 * 60 * 60 * 1000; // 7 days

// Shape of the JSON document persisted at MODEL_CACHE_PATH.
interface ModelCache {
  // Epoch milliseconds when the cache was last written.
  timestamp: number;
  // Provider name -> list of model identifiers.
  models: Record<string, string[]>;
}
|
||||
|
||||
function readCache(): ModelCache | null {
|
||||
try {
|
||||
if (!existsSync(MODEL_CACHE_PATH)) {
|
||||
return null;
|
||||
}
|
||||
const data = readFileSync(MODEL_CACHE_PATH, 'utf8');
|
||||
return JSON.parse(data);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function writeCache(models: Record<string, string[]>): void {
|
||||
try {
|
||||
const cache: ModelCache = {
|
||||
timestamp: Date.now(),
|
||||
models
|
||||
};
|
||||
writeFileSync(MODEL_CACHE_PATH, JSON.stringify(cache, null, 2), 'utf8');
|
||||
} catch {
|
||||
// Silently fail if we can't write cache
|
||||
}
|
||||
}
|
||||
|
||||
function isCacheValid(cache: ModelCache | null): boolean {
|
||||
if (!cache) return false;
|
||||
return Date.now() - cache.timestamp < CACHE_TTL_MS;
|
||||
}
|
||||
|
||||
export async function fetchOpenAIModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.openai.com/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data
|
||||
.map((m: { id: string }) => m.id)
|
||||
.filter(
|
||||
(id: string) =>
|
||||
id.startsWith('gpt-') ||
|
||||
id.startsWith('o1') ||
|
||||
id.startsWith('o3') ||
|
||||
id.startsWith('o4')
|
||||
)
|
||||
.sort();
|
||||
|
||||
return models.length > 0 ? models : MODEL_LIST.openai;
|
||||
} catch {
|
||||
return MODEL_LIST.openai;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchOllamaModels(
|
||||
baseUrl: string = 'http://localhost:11434'
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/api/tags`);
|
||||
|
||||
if (!response.ok) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data.models?.map((m: { name: string }) => m.name) || [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchAnthropicModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.anthropic.com/v1/models', {
|
||||
headers: {
|
||||
'x-api-key': apiKey,
|
||||
'anthropic-version': '2023-06-01'
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.anthropic;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data
|
||||
?.map((m: { id: string }) => m.id)
|
||||
.filter((id: string) => id.startsWith('claude-'))
|
||||
.sort();
|
||||
|
||||
return models && models.length > 0 ? models : MODEL_LIST.anthropic;
|
||||
} catch {
|
||||
return MODEL_LIST.anthropic;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchMistralModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.mistral.ai/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.mistral;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data?.map((m: { id: string }) => m.id).sort();
|
||||
|
||||
return models && models.length > 0 ? models : MODEL_LIST.mistral;
|
||||
} catch {
|
||||
return MODEL_LIST.mistral;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchGroqModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.groq.com/openai/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.groq;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data?.map((m: { id: string }) => m.id).sort();
|
||||
|
||||
return models && models.length > 0 ? models : MODEL_LIST.groq;
|
||||
} catch {
|
||||
return MODEL_LIST.groq;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchOpenRouterModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://openrouter.ai/api/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.openrouter;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
// Filter to text-capable models only (exclude image/audio models)
|
||||
const models = data.data
|
||||
?.filter(
|
||||
(m: { id: string; context_length?: number }) =>
|
||||
m.context_length && m.context_length > 0
|
||||
)
|
||||
.map((m: { id: string }) => m.id)
|
||||
.sort();
|
||||
|
||||
return models && models.length > 0 ? models : MODEL_LIST.openrouter;
|
||||
} catch {
|
||||
return MODEL_LIST.openrouter;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchDeepSeekModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.deepseek.com/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.deepseek;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data?.map((m: { id: string }) => m.id).sort();
|
||||
|
||||
return models && models.length > 0 ? models : MODEL_LIST.deepseek;
|
||||
} catch {
|
||||
return MODEL_LIST.deepseek;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchModelsForProvider(
|
||||
provider: string,
|
||||
apiKey?: string,
|
||||
baseUrl?: string,
|
||||
forceRefresh: boolean = false
|
||||
): Promise<string[]> {
|
||||
const cache = readCache();
|
||||
|
||||
// Return cached models if valid (unless force refresh)
|
||||
if (!forceRefresh && isCacheValid(cache) && cache!.models[provider]) {
|
||||
return cache!.models[provider];
|
||||
}
|
||||
|
||||
let models: string[] = [];
|
||||
|
||||
switch (provider.toLowerCase()) {
|
||||
case OCO_AI_PROVIDER_ENUM.OPENAI:
|
||||
if (apiKey) {
|
||||
models = await fetchOpenAIModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.openai;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
models = await fetchOllamaModels(baseUrl);
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
if (apiKey) {
|
||||
models = await fetchAnthropicModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.anthropic;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
// Google's API doesn't easily list generative models, use hardcoded list
|
||||
models = MODEL_LIST.gemini;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GROQ:
|
||||
if (apiKey) {
|
||||
models = await fetchGroqModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.groq;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.MISTRAL:
|
||||
if (apiKey) {
|
||||
models = await fetchMistralModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.mistral;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
if (apiKey) {
|
||||
models = await fetchDeepSeekModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.deepseek;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
|
||||
models = MODEL_LIST.aimlapi;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
|
||||
if (apiKey) {
|
||||
models = await fetchOpenRouterModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.openrouter;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
models = MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
// Update cache
|
||||
const existingCache = cache?.models || {};
|
||||
existingCache[provider] = models;
|
||||
writeCache(existingCache);
|
||||
|
||||
return models;
|
||||
}
|
||||
|
||||
export function getModelsForProvider(provider: string): string[] {
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
return MODEL_LIST[providerKey] || MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
export function clearModelCache(): void {
|
||||
try {
|
||||
if (existsSync(MODEL_CACHE_PATH)) {
|
||||
writeFileSync(MODEL_CACHE_PATH, '{}', 'utf8');
|
||||
}
|
||||
} catch {
|
||||
// Silently fail
|
||||
}
|
||||
}
|
||||
|
||||
export function getCacheInfo(): {
|
||||
timestamp: number | null;
|
||||
providers: string[];
|
||||
} {
|
||||
const cache = readCache();
|
||||
if (!cache) {
|
||||
return { timestamp: null, providers: [] };
|
||||
}
|
||||
return {
|
||||
timestamp: cache.timestamp,
|
||||
providers: Object.keys(cache.models || {})
|
||||
};
|
||||
}
|
||||
|
||||
export function getCachedModels(provider: string): string[] | null {
|
||||
const cache = readCache();
|
||||
if (!cache || !cache.models[provider]) {
|
||||
return null;
|
||||
}
|
||||
return cache.models[provider];
|
||||
}
|
||||
52
src/utils/proxy.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import axios from 'axios';
|
||||
import { HttpsProxyAgent } from 'https-proxy-agent';
|
||||
import { Agent, ProxyAgent, setGlobalDispatcher } from 'undici';
|
||||
|
||||
export type ProxySetting = string | null | undefined;
|
||||
|
||||
export function resolveProxy(proxySetting?: ProxySetting): ProxySetting {
|
||||
if (proxySetting === null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (typeof proxySetting === 'string' && proxySetting.trim().length > 0) {
|
||||
return proxySetting;
|
||||
}
|
||||
|
||||
return process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
|
||||
}
|
||||
|
||||
function resetProxySetup(disableEnvProxy: boolean) {
|
||||
setGlobalDispatcher(new Agent());
|
||||
axios.defaults.httpAgent = undefined;
|
||||
axios.defaults.httpsAgent = undefined;
|
||||
axios.defaults.proxy = disableEnvProxy ? false : undefined;
|
||||
}
|
||||
|
||||
export function setupProxy(proxySetting?: ProxySetting) {
|
||||
try {
|
||||
if (proxySetting === null) {
|
||||
resetProxySetup(true);
|
||||
return;
|
||||
}
|
||||
|
||||
resetProxySetup(false);
|
||||
|
||||
if (!proxySetting) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Set global dispatcher for undici (affects globalThis.fetch used by Gemini and others)
|
||||
const dispatcher = new ProxyAgent(proxySetting);
|
||||
setGlobalDispatcher(dispatcher);
|
||||
|
||||
// Set axios global agents and disable axios built-in proxy handling.
|
||||
const agent = new HttpsProxyAgent(proxySetting);
|
||||
axios.defaults.httpAgent = agent;
|
||||
axios.defaults.httpsAgent = agent;
|
||||
axios.defaults.proxy = false;
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.warn(`[Proxy Error] Failed to set proxy: ${message}`);
|
||||
}
|
||||
}
|
||||
@@ -4,20 +4,23 @@
|
||||
* @param tag The tag name without angle brackets (e.g., 'think' for '<think></think>')
|
||||
* @returns The content with the specified tags and their contents removed, and trimmed
|
||||
*/
|
||||
export function removeContentTags<T extends string | null | undefined>(content: T, tag: string): T {
|
||||
export function removeContentTags<T extends string | null | undefined>(
|
||||
content: T,
|
||||
tag: string
|
||||
): T {
|
||||
if (!content || typeof content !== 'string') {
|
||||
return content;
|
||||
}
|
||||
|
||||
|
||||
// Dynamic implementation for other cases
|
||||
const openTag = `<${tag}>`;
|
||||
const closeTag = `</${tag}>`;
|
||||
|
||||
|
||||
// Parse the content and remove tags
|
||||
let result = '';
|
||||
let skipUntil: number | null = null;
|
||||
let depth = 0;
|
||||
|
||||
|
||||
for (let i = 0; i < content.length; i++) {
|
||||
// Check for opening tag
|
||||
if (content.substring(i, i + openTag.length) === openTag) {
|
||||
@@ -29,7 +32,10 @@ export function removeContentTags<T extends string | null | undefined>(content:
|
||||
}
|
||||
}
|
||||
// Check for closing tag
|
||||
else if (content.substring(i, i + closeTag.length) === closeTag && depth > 0) {
|
||||
else if (
|
||||
content.substring(i, i + closeTag.length) === closeTag &&
|
||||
depth > 0
|
||||
) {
|
||||
depth--;
|
||||
if (depth === 0) {
|
||||
i = i + closeTag.length - 1; // Skip the closing tag
|
||||
@@ -37,7 +43,7 @@ export function removeContentTags<T extends string | null | undefined>(content:
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Only add character if not inside a tag
|
||||
if (skipUntil === null) {
|
||||
result += content[i];
|
||||
|
||||
757
test/e2e/cliBehavior.test.ts
Normal file
@@ -0,0 +1,757 @@
|
||||
import {
|
||||
existsSync,
|
||||
lstatSync,
|
||||
readFileSync,
|
||||
realpathSync,
|
||||
rmSync,
|
||||
writeFileSync
|
||||
} from 'fs';
|
||||
import { resolve } from 'path';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import {
|
||||
assertGitStatus,
|
||||
assertHeadCommit,
|
||||
getHeadCommitFiles,
|
||||
getMockOpenAiEnv,
|
||||
prepareEnvironment,
|
||||
prepareRepo,
|
||||
prepareTempDir,
|
||||
runCli,
|
||||
runGit,
|
||||
runProcess,
|
||||
seedMigrations,
|
||||
seedModelCache,
|
||||
startMockOpenAiServer,
|
||||
waitForExit,
|
||||
writeGlobalConfig,
|
||||
writeRepoFile
|
||||
} from './utils';
|
||||
|
||||
// Scenario: --context is forwarded into the model prompt, and --yes skips
// both the commit confirmation and the push prompt.
it('cli flow passes --context through to the model prompt and skips confirmation with --yes', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(context): handle production incident'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli(['--yes', '--context=production-incident'], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    // With --yes, no interactive prompts should ever appear.
    expect(
      await oco.queryByText('Confirm the commit message?')
    ).not.toBeInTheConsole();
    expect(
      await oco.queryByText('Do you want to run `git push`?')
    ).not.toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(gitDir, 'fix(context): handle production incident');

    // Inspect the last request sent to the mock server: it must carry the
    // user context tag and the staged diff.
    const requestPayload = server.requestBodies[
      server.requestBodies.length - 1
    ] as { messages: Array<{ content: string }> };
    const requestContents = requestPayload.messages
      .map((message) => message.content)
      .join('\n');

    expect(requestContents).toContain('<context>production-incident</context>');
    expect(requestContents).toContain('console.log("Hello World");');
    expect(server.authHeaders).toContain('Bearer test-openai-key');
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: --fgm switches the prompt to the full GitMoji specification.
it('cli flow passes --fgm through to the full GitMoji prompt', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'feat(fgm): use the extended gitmoji specification'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli(['--fgm', '--yes'], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(
      gitDir,
      'feat(fgm): use the extended gitmoji specification'
    );

    const requestPayload = server.requestBodies[
      server.requestBodies.length - 1
    ] as { messages: Array<{ content: string }> };
    const requestContents = requestPayload.messages
      .map((message) => message.content)
      .join('\n');

    // The extended GitMoji table should be part of the prompt.
    expect(requestContents).toContain(
      '🎨, Improve structure / format of the code;'
    );
    expect(requestContents).toContain('GitMoji specification');
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: choosing the "edit" option lets the user amend the generated
// message inline before committing.
it('cli flow allows editing the generated commit message before committing', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(cli): allow editing the generated message'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    // Navigate past "yes"/"regenerate" to the edit option.
    oco.userEvent.keyboard('[ArrowDown][ArrowDown][Enter]');

    expect(
      await oco.findByText(
        'Please edit the commit message: (press Enter to continue)'
      )
    ).toBeInTheConsole();
    // Append text to the pre-filled message.
    oco.userEvent.keyboard(' before commit[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(
      gitDir,
      'fix(cli): allow editing the generated message before commit'
    );
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: rejecting the first suggestion triggers a second model call and
// the regenerated message is what ends up committed.
it('cli flow regenerates the message when the user rejects the first suggestion', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  // The mock server answers differently on the first vs. second request.
  const server = await startMockOpenAiServer(({ requestIndex }) => ({
    body: {
      choices: [
        {
          message: {
            content:
              requestIndex === 0
                ? 'fix(cli): first generated message'
                : 'fix(cli): regenerated message after retry'
          }
        }
      ]
    }
  }));

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[ArrowDown][Enter]');

    expect(
      await oco.findByText('Do you want to regenerate the message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    // Clear the captured console so the next findByText matches fresh output.
    oco.clear();
    expect(
      await oco.findByText('fix(cli): regenerated message after retry')
    ).toBeInTheConsole();
    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(gitDir, 'fix(cli): regenerated message after retry');
    expect(server.requestBodies).toHaveLength(2);
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: with nothing staged, the user can pick individual files; only
// the selected file is committed, the other stays untracked.
it('cli flow lets the user select only specific unstaged files', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(cli): commit only the selected files'
  );

  try {
    await prepareRepo(gitDir, {
      'alpha.ts': 'console.log("alpha");\n',
      'beta.ts': 'console.log("beta");\n'
    });

    const oco = await runCli([], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(await oco.findByText('No files are staged')).toBeInTheConsole();
    expect(
      await oco.findByText(
        'Do you want to stage all files and generate commit message?'
      )
    ).toBeInTheConsole();
    // Decline "stage all" to reach the file selector.
    oco.userEvent.keyboard('[ArrowDown][Enter]');

    expect(
      await oco.findByText('Select the files you want to add to the commit:')
    ).toBeInTheConsole();
    // Toggle only the first entry (alpha.ts) and confirm.
    oco.userEvent.keyboard('[Space][Enter]');

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    expect(await getHeadCommitFiles(gitDir)).toEqual(['alpha.ts']);
    await assertGitStatus(gitDir, '?? beta.ts');
  } finally {
    await server.cleanup();
    await cleanup();
  }
});
|
||||
|
||||
// Scenario: an extra CLI arg like '$msg #205' acts as a message template;
// $msg is replaced with the generated subject.
it('cli applies the documented message template placeholder from extra args', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'feat(template): keep generated subject'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli(["'$msg #205'"], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(
      gitDir,
      'feat(template): keep generated subject #205'
    );
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: `oco hook set` creates the prepare-commit-msg hook as a symlink
// to the built CLI, and `oco hook unset` removes it again.
it('hook command sets and unsets the prepare-commit-msg symlink', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const hookPath = resolve(gitDir, '.git/hooks/prepare-commit-msg');
  const cliPath = resolve('./out/cli.cjs');

  try {
    const setHook = await runCli(['hook', 'set'], {
      cwd: gitDir
    });

    expect(await setHook.findByText('Hook set')).toBeInTheConsole();
    expect(await waitForExit(setHook)).toBe(0);
    expect(existsSync(hookPath)).toBe(true);
    expect(lstatSync(hookPath).isSymbolicLink()).toBe(true);
    expect(realpathSync(hookPath)).toBe(cliPath);

    const unsetHook = await runCli(['hook', 'unset'], {
      cwd: gitDir
    });

    expect(await unsetHook.findByText('Hook is removed')).toBeInTheConsole();
    expect(await waitForExit(unsetHook)).toBe(0);
    expect(existsSync(hookPath)).toBe(false);
  } finally {
    await cleanup();
  }
});

// Scenario: invoking the installed hook directly prepends the generated
// message (commented out) above the existing COMMIT_EDITMSG content.
it('prepare-commit-msg hook writes the generated message into the commit message file', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(hook): populate the commit message file'
  );
  const hookPath = resolve(gitDir, '.git/hooks/prepare-commit-msg');
  const messageFile = resolve(gitDir, '.git/COMMIT_EDITMSG');

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const setHook = await runCli(['hook', 'set'], {
      cwd: gitDir
    });
    expect(await setHook.findByText('Hook set')).toBeInTheConsole();
    expect(await waitForExit(setHook)).toBe(0);

    // Seed the message file so we can assert the original content survives.
    writeFileSync(messageFile, '# existing\n');

    const hookRun = await runProcess(hookPath, [messageFile], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(await hookRun.findByText('Done')).toBeInTheConsole();
    expect(await waitForExit(hookRun)).toBe(0);

    const commitMessage = readFileSync(messageFile, 'utf8');
    expect(commitMessage).toContain(
      '# fix(hook): populate the commit message file'
    );
    expect(commitMessage).toContain('# ---------- [OpenCommit] ---------- #');
    expect(commitMessage).toContain('# existing');
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: when the global config lacks an API key, the CLI prompts for
// one, persists it to ~/.opencommit, and proceeds to commit.
it('cli flow prompts for a missing API key, saves it, and completes the commit', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const homeDir = await prepareTempDir();
  const server = await startMockOpenAiServer(
    'fix(api): recovered after prompt'
  );

  try {
    // Global config deliberately omits OCO_API_KEY.
    const configPath = writeGlobalConfig(homeDir, [
      'OCO_AI_PROVIDER=openai',
      'OCO_MODEL=gpt-4o-mini',
      `OCO_API_URL=${server.baseUrl}`,
      'OCO_GITPUSH=false'
    ]);
    seedMigrations(homeDir);

    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: {
        HOME: homeDir
      }
    });

    expect(
      await oco.findByText("API key missing for openai. Let's set it up.")
    ).toBeInTheConsole();
    expect(await oco.findByText('Enter your API key:')).toBeInTheConsole();
    oco.userEvent.keyboard('test-openai-key[Enter]');

    expect(await oco.findByText('API key saved')).toBeInTheConsole();
    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(gitDir, 'fix(api): recovered after prompt');
    expect(server.authHeaders).toContain('Bearer test-openai-key');
    expect(readFileSync(configPath, 'utf8')).toContain(
      'OCO_API_KEY=test-openai-key'
    );
  } finally {
    await server.cleanup();
    rmSync(homeDir, { force: true, recursive: true });
    await cleanup();
  }
});

// Scenario: if the ONLY staged change is ignored by .opencommitignore the
// CLI reports "No changes detected" and exits non-zero without committing.
it('cli ignores files listed in .opencommitignore when they are the only staged changes', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();

  try {
    await prepareRepo(
      gitDir,
      {
        '.opencommitignore': 'ignored.ts\n'
      },
      {
        stage: true,
        commitMessage: 'add opencommit ignore'
      }
    );

    writeRepoFile(gitDir, 'ignored.ts', 'console.log("ignored");\n');
    await runGit(['add', 'ignored.ts'], gitDir);

    const oco = await runCli([], {
      cwd: gitDir,
      env: {
        OCO_AI_PROVIDER: 'openai',
        OCO_API_KEY: 'dummy-openai-key',
        OCO_GITPUSH: 'false'
      }
    });

    expect(await oco.findByText('No changes detected')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(1);
    // HEAD still points at the setup commit — nothing new was committed.
    await assertHeadCommit(gitDir, 'add opencommit ignore');
  } finally {
    await cleanup();
  }
});
|
||||
|
||||
// Scenario: ignored files are excluded from the prompt sent to the model,
// but still included in the resulting commit.
it('cli excludes .opencommitignore files from the generated prompt while still committing staged changes', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(ignore): keep only relevant diff context'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        '.opencommitignore': 'ignored.ts\n'
      },
      {
        stage: true,
        commitMessage: 'add opencommit ignore'
      }
    );

    writeRepoFile(gitDir, 'kept.ts', 'console.log("kept");\n');
    writeRepoFile(gitDir, 'ignored.ts', 'console.log("ignored");\n');
    await runGit(['add', 'kept.ts', 'ignored.ts'], gitDir);

    const oco = await runCli([], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);

    const requestPayload = server.requestBodies[
      server.requestBodies.length - 1
    ] as { messages: Array<{ content: string }> };
    const requestContents = requestPayload.messages
      .map((message) => message.content)
      .join('\n');

    // Prompt: kept file present, ignored file absent.
    expect(requestContents).toContain('kept.ts');
    expect(requestContents).toContain('console.log("kept");');
    expect(requestContents).not.toContain('ignored.ts');
    expect(requestContents).not.toContain('console.log("ignored");');
    // Commit: both staged files land in the commit regardless.
    expect(await getHeadCommitFiles(gitDir)).toEqual(['ignored.ts', 'kept.ts']);
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: first run with no config triggers the interactive setup wizard
// (provider, key, model), saves the config, then commits normally.
it('first run launches setup, saves config, and completes a commit with the configured provider', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const homeDir = await prepareTempDir();
  const server = await startMockOpenAiServer(
    'feat(setup): finish first run successfully'
  );

  try {
    const configPath = resolve(homeDir, '.opencommit');

    // Pre-seed the model cache so the wizard's model list is deterministic.
    await seedModelCache(homeDir, {
      openai: ['gpt-4o-mini', 'gpt-4o']
    });
    seedMigrations(homeDir);

    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: {
        HOME: homeDir,
        OCO_API_URL: server.baseUrl,
        OCO_GITPUSH: 'false'
      }
    });

    expect(await oco.findByText('Select your AI provider:')).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Enter your API key:')).toBeInTheConsole();
    oco.userEvent.keyboard('first-run-openai-key[Enter]');

    expect(await oco.findByText('Select a model:')).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(
      await oco.findByText('Configuration saved to ~/.opencommit')
    ).toBeInTheConsole();
    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(
      gitDir,
      'feat(setup): finish first run successfully'
    );
    expect(readFileSync(configPath, 'utf8')).toContain(
      'OCO_AI_PROVIDER=openai'
    );
    expect(readFileSync(configPath, 'utf8')).toContain(
      'OCO_API_KEY=first-run-openai-key'
    );
    expect(readFileSync(configPath, 'utf8')).toContain('OCO_MODEL=gpt-4o-mini');
    expect(server.authHeaders).toContain('Bearer first-run-openai-key');
  } finally {
    await server.cleanup();
    rmSync(homeDir, { force: true, recursive: true });
    await cleanup();
  }
});

// Scenario: a 404 model_not_found from the API triggers a model picker; the
// chosen model is retried, saved as default, and used for the commit.
it('cli recovers from a missing model by prompting for an alternative and retrying', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const homeDir = await prepareTempDir();
  // First request 404s with model_not_found; the retry succeeds.
  const server = await startMockOpenAiServer(({ requestIndex, body }) => {
    if (requestIndex === 0) {
      return {
        status: 404,
        body: {
          error: {
            message: `The model '${body?.model}' does not exist`,
            type: 'invalid_request_error',
            code: 'model_not_found'
          }
        }
      };
    }

    return {
      body: {
        choices: [
          {
            message: {
              content: 'fix(model): recover from invalid default model'
            }
          }
        ]
      }
    };
  });

  try {
    const configPath = writeGlobalConfig(homeDir, [
      'OCO_AI_PROVIDER=openai',
      'OCO_API_KEY=test-openai-key',
      'OCO_MODEL=missing-model',
      `OCO_API_URL=${server.baseUrl}`,
      'OCO_GITPUSH=false'
    ]);
    seedMigrations(homeDir);

    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: {
        HOME: homeDir
      }
    });

    expect(
      await oco.findByText("Model 'missing-model' not found")
    ).toBeInTheConsole();
    expect(
      await oco.findByText('Select an alternative model:')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Save as default model?')).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Model saved as default')).toBeInTheConsole();
    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(
      gitDir,
      'fix(model): recover from invalid default model'
    );
    // First request used the bad model, retry used the selected one.
    expect(server.requestBodies.map((request) => request.model)).toEqual([
      'missing-model',
      'gpt-4o-mini'
    ]);
    expect(readFileSync(configPath, 'utf8')).toContain('OCO_MODEL=gpt-4o-mini');
  } finally {
    await server.cleanup();
    rmSync(homeDir, { force: true, recursive: true });
    await cleanup();
  }
});

// Scenario: lockfiles and binary-ish assets are filtered out of the prompt
// but still committed with everything else.
it('cli excludes lockfiles and assets from the generated prompt while still committing them', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();
  const server = await startMockOpenAiServer(
    'fix(diff): focus prompt on meaningful source changes'
  );

  try {
    await prepareRepo(
      gitDir,
      {
        'kept.ts': 'console.log("kept");\n',
        'package-lock.json': '{"name":"opencommit","lockfileVersion":3}\n',
        'logo.svg':
          '<svg viewBox="0 0 1 1"><rect width="1" height="1" /></svg>\n'
      },
      { stage: true }
    );

    const oco = await runCli([], {
      cwd: gitDir,
      env: getMockOpenAiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText('Confirm the commit message?')
    ).toBeInTheConsole();
    oco.userEvent.keyboard('[Enter]');

    expect(await oco.findByText('Successfully committed')).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(0);

    const requestPayload = server.requestBodies[
      server.requestBodies.length - 1
    ] as { messages: Array<{ content: string }> };
    const requestContents = requestPayload.messages
      .map((message) => message.content)
      .join('\n');

    expect(requestContents).toContain('kept.ts');
    expect(requestContents).toContain('console.log("kept");');
    expect(requestContents).not.toContain('package-lock.json');
    expect(requestContents).not.toContain('lockfileVersion');
    expect(requestContents).not.toContain('logo.svg');
    expect(requestContents).not.toContain('<svg');
    expect(await getHeadCommitFiles(gitDir)).toEqual([
      'kept.ts',
      'logo.svg',
      'package-lock.json'
    ]);
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: running outside any git repository exits 1 with an explanatory
// message.
it('fails with a non-zero exit code outside a git repository', async () => {
  const tempDir = await prepareTempDir();

  try {
    const oco = await runCli([], {
      cwd: tempDir,
      env: {
        OCO_AI_PROVIDER: 'openai',
        OCO_API_KEY: 'dummy-openai-key',
        OCO_GITPUSH: 'false'
      }
    });

    expect(await waitForExit(oco)).toBe(1);
    expect(oco.getStdallStr()).toMatch(
      /No changes detected|not a git repository/
    );
  } finally {
    rmSync(tempDir, { force: true, recursive: true });
  }
});
|
||||
108
test/e2e/geminiBehavior.test.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import {
|
||||
assertHeadCommit,
|
||||
getHeadCommitMessage,
|
||||
getMockGeminiEnv,
|
||||
prepareEnvironment,
|
||||
prepareRepo,
|
||||
runCli,
|
||||
startMockGeminiServer,
|
||||
waitForExit
|
||||
} from './utils';
|
||||
|
||||
// Scenario: Gemini responses may interleave executableCode /
// codeExecutionResult parts; only the text part should reach the commit.
it('built CLI ignores Gemini executable code parts when creating the commit message', async () => {
  const { gitDir, cleanup } = await prepareEnvironment({ remotes: 0 });
  const server = await startMockGeminiServer({
    candidates: [
      {
        index: 0,
        content: {
          role: 'model',
          parts: [
            { text: 'feat(gemini): keep text output only' },
            {
              executableCode: {
                language: 'python',
                code: 'print("debug")'
              }
            },
            {
              codeExecutionResult: {
                outcome: 'outcome_ok',
                output: 'debug'
              }
            }
          ]
        },
        finishReason: 'STOP'
      }
    ]
  });

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli(['--yes'], {
      cwd: gitDir,
      env: getMockGeminiEnv(server.baseUrl)
    });

    expect(await waitForExit(oco)).toBe(0);
    await assertHeadCommit(gitDir, 'feat(gemini): keep text output only');
    // The full message must be exactly the text part — no code artifacts.
    expect(await getHeadCommitMessage(gitDir)).toBe(
      'feat(gemini): keep text output only'
    );
    expect(server.apiKeys).toContain('test-gemini-key');
  } finally {
    await server.cleanup();
    await cleanup();
  }
});

// Scenario: a LANGUAGE finishReason means the response was blocked; the CLI
// must surface it as an error and exit 1 without committing.
it('built CLI surfaces Gemini LANGUAGE finish reasons as errors', async () => {
  const { gitDir, cleanup } = await prepareEnvironment({ remotes: 0 });
  const server = await startMockGeminiServer({
    candidates: [
      {
        index: 0,
        content: {
          role: 'model',
          parts: [{ text: 'feat(gemini): should not commit' }]
        },
        finishReason: 'LANGUAGE',
        finishMessage: 'Unsupported language'
      }
    ]
  });

  try {
    await prepareRepo(
      gitDir,
      {
        'index.ts': 'console.log("Hello World");\n'
      },
      { stage: true }
    );

    const oco = await runCli(['--yes'], {
      cwd: gitDir,
      env: getMockGeminiEnv(server.baseUrl)
    });

    expect(
      await oco.findByText(
        'Gemini response was blocked due to LANGUAGE: Unsupported language'
      )
    ).toBeInTheConsole();
    expect(await waitForExit(oco)).toBe(1);
  } finally {
    await server.cleanup();
    await cleanup();
  }
});
|
||||
@@ -1,205 +1,216 @@
|
||||
import path from 'path';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { exec } from 'child_process';
|
||||
import { prepareTempDir } from './utils';
|
||||
import { promisify } from 'util';
|
||||
import { render } from 'cli-testing-library';
|
||||
import { resolve } from 'path';
|
||||
import { rm } from 'fs';
|
||||
const fsExec = promisify(exec);
|
||||
const fsRemove = promisify(rm);
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* [no remotes]
|
||||
*/
|
||||
const prepareNoRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
*/
|
||||
const prepareOneRemoteGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* git remote -v
|
||||
*
|
||||
* origin /tmp/remote.git (fetch)
|
||||
* origin /tmp/remote.git (push)
|
||||
* other ../remote2.git (fetch)
|
||||
* other ../remote2.git (push)
|
||||
*/
|
||||
const prepareTwoRemotesGitRepository = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git init --bare other.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
await fsExec('git remote add other ../other.git', { cwd: gitDir });
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
gitDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
import {
|
||||
assertHeadCommit,
|
||||
getCurrentBranchName,
|
||||
getMockOpenAiEnv,
|
||||
getRemoteBranchHeadSubject,
|
||||
prepareEnvironment,
|
||||
prepareRepo,
|
||||
remoteBranchExists,
|
||||
runCli,
|
||||
startMockOpenAiServer,
|
||||
waitForExit
|
||||
} from './utils';
|
||||
|
||||
describe('cli flow to push git branch', () => {
|
||||
it('do nothing when OCO_GITPUSH is set to false', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='false' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
it('does nothing when OCO_GITPUSH is set to false', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment({ remotes: 0 });
|
||||
const server = await startMockOpenAiServer(
|
||||
'fix(push): keep the commit local when push is disabled'
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Command failed with exit code 1')
|
||||
).not.toBeInTheConsole();
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
await cleanup();
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'false'
|
||||
})
|
||||
});
|
||||
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'fix(push): keep the commit local when push is disabled'
|
||||
);
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('push and cause error when there is no remote', async () => {
|
||||
const { gitDir, cleanup } = await prepareNoRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
it('fails after committing when push is enabled but there is no remote', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment({ remotes: 0 });
|
||||
const server = await startMockOpenAiServer(
|
||||
'fix(push): commit even when the push later fails'
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await queryByText('Choose a remote to push to')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Do you want to run `git push`?')
|
||||
).not.toBeInTheConsole();
|
||||
expect(
|
||||
await queryByText('Successfully pushed all commits to origin')
|
||||
).not.toBeInTheConsole();
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
expect(
|
||||
await findByText('Command failed with exit code 1')
|
||||
).toBeInTheConsole();
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
await cleanup();
|
||||
expect(await waitForExit(oco)).toBe(1);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'fix(push): commit even when the push later fails'
|
||||
);
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('push when one remote is set', async () => {
|
||||
const { gitDir, cleanup } = await prepareOneRemoteGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
it('pushes to the only configured remote', async () => {
|
||||
const { gitDir, remoteDir, cleanup } = await prepareEnvironment({
|
||||
remotes: 1
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
const server = await startMockOpenAiServer(
|
||||
'feat(push): publish the commit to the only remote'
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'feat(push): publish the commit to the only remote'
|
||||
);
|
||||
expect(
|
||||
await getRemoteBranchHeadSubject(
|
||||
remoteDir!,
|
||||
await getCurrentBranchName(gitDir)
|
||||
)
|
||||
).toBe('feat(push): publish the commit to the only remote');
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('push when two remotes are set', async () => {
|
||||
const { gitDir, cleanup } = await prepareTwoRemotesGitRepository();
|
||||
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
it('pushes to the selected remote when multiple remotes are configured', async () => {
|
||||
const { gitDir, remoteDir, cleanup } = await prepareEnvironment({
|
||||
remotes: 2
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
|
||||
const { findByText, userEvent } = await render(
|
||||
`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
|
||||
[resolve('./out/cli.cjs')],
|
||||
{ cwd: gitDir }
|
||||
const server = await startMockOpenAiServer(
|
||||
'feat(push): choose a remote explicitly when several exist'
|
||||
);
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
expect(
|
||||
await findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('Choose a remote to push to')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'feat(push): choose a remote explicitly when several exist'
|
||||
);
|
||||
expect(
|
||||
await getRemoteBranchHeadSubject(
|
||||
remoteDir!,
|
||||
await getCurrentBranchName(gitDir)
|
||||
)
|
||||
).toBe('feat(push): choose a remote explicitly when several exist');
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it("keeps the commit local when the user chooses 'don't push'", async () => {
|
||||
const { gitDir, remoteDir, otherRemoteDir, cleanup } =
|
||||
await prepareEnvironment({ remotes: 2 });
|
||||
const server = await startMockOpenAiServer(
|
||||
"fix(push): skip the remote step when the user chooses don't push"
|
||||
);
|
||||
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
expect(
|
||||
await oco.findByText('Choose a remote to push to')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[ArrowDown][ArrowDown][Enter]');
|
||||
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
"fix(push): skip the remote step when the user chooses don't push"
|
||||
);
|
||||
|
||||
const branchName = await getCurrentBranchName(gitDir);
|
||||
expect(await remoteBranchExists(remoteDir!, branchName)).toBe(false);
|
||||
expect(await remoteBranchExists(otherRemoteDir!, branchName)).toBe(false);
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,12 +1,22 @@
|
||||
import { resolve } from 'path'
|
||||
import { render } from 'cli-testing-library'
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment } from './utils';
|
||||
import { prepareEnvironment, runCli, waitForExit } from './utils';
|
||||
|
||||
it('cli flow when there are no changes', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
const { findByText } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
expect(await findByText('No changes detected')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
try {
|
||||
const oco = await runCli([], {
|
||||
cwd: gitDir,
|
||||
env: {
|
||||
OCO_AI_PROVIDER: 'openai',
|
||||
OCO_API_KEY: 'dummy-openai-key',
|
||||
OCO_GITPUSH: 'false'
|
||||
}
|
||||
});
|
||||
|
||||
expect(await oco.findByText('No changes detected')).toBeInTheConsole();
|
||||
expect(await waitForExit(oco)).toBe(1);
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,55 +1,119 @@
|
||||
import { resolve } from 'path'
|
||||
import { render } from 'cli-testing-library'
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment } from './utils';
|
||||
import {
|
||||
assertHeadCommit,
|
||||
getCurrentBranchName,
|
||||
getMockOpenAiEnv,
|
||||
getRemoteBranchHeadSubject,
|
||||
prepareEnvironment,
|
||||
prepareRepo,
|
||||
runCli,
|
||||
startMockOpenAiServer,
|
||||
appendRepoFile,
|
||||
waitForExit
|
||||
} from './utils';
|
||||
|
||||
it('cli flow to generate commit message for 1 new file (staged)', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
const { gitDir, remoteDir, cleanup } = await prepareEnvironment();
|
||||
const server = await startMockOpenAiServer(
|
||||
'feat(cli): commit one staged file through the CLI'
|
||||
);
|
||||
|
||||
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
|
||||
await render('git' ,['add index.ts'], { cwd: gitDir });
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
expect(await queryByText('No files are staged')).not.toBeInTheConsole();
|
||||
expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
expect(await findByText('Generating the commit message')).toBeInTheConsole();
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
expect(await oco.queryByText('No files are staged')).not.toBeInTheConsole();
|
||||
expect(
|
||||
await oco.queryByText(
|
||||
'Do you want to stage all files and generate commit message?'
|
||||
)
|
||||
).not.toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'feat(cli): commit one staged file through the CLI'
|
||||
);
|
||||
expect(
|
||||
await getRemoteBranchHeadSubject(
|
||||
remoteDir!,
|
||||
await getCurrentBranchName(gitDir)
|
||||
)
|
||||
).toBe('feat(cli): commit one staged file through the CLI');
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('cli flow to generate commit message for 1 changed file (not staged)', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
const server = await startMockOpenAiServer(
|
||||
'fix(cli): stage modified files before committing'
|
||||
);
|
||||
|
||||
await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
|
||||
await render('git' ,['add index.ts'], { cwd: gitDir });
|
||||
await render('git' ,[`commit -m 'add new file'`], { cwd: gitDir });
|
||||
try {
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{
|
||||
stage: true,
|
||||
commitMessage: 'add new file'
|
||||
}
|
||||
);
|
||||
appendRepoFile(gitDir, 'index.ts', 'console.log("Good night World");\n');
|
||||
|
||||
await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });
|
||||
const oco = await runCli(['--yes'], {
|
||||
cwd: gitDir,
|
||||
env: getMockOpenAiEnv(server.baseUrl, {
|
||||
OCO_GITPUSH: 'true'
|
||||
})
|
||||
});
|
||||
|
||||
const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
expect(await oco.findByText('No files are staged')).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText(
|
||||
'Do you want to stage all files and generate commit message?'
|
||||
)
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('No files are staged')).toBeInTheConsole();
|
||||
expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
expect(
|
||||
await oco.findByText('Confirm the commit message?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Generating the commit message')).toBeInTheConsole();
|
||||
expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully committed')).toBeInTheConsole();
|
||||
|
||||
expect(await findByText('Do you want to run `git push`?')).toBeInTheConsole();
|
||||
userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(
|
||||
gitDir,
|
||||
'fix(cli): stage modified files before committing'
|
||||
);
|
||||
} finally {
|
||||
await server.cleanup();
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,146 +1,158 @@
|
||||
import { resolve } from 'path';
|
||||
import { render } from 'cli-testing-library';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment, wait } from '../utils';
|
||||
import { cpSync } from 'fs';
|
||||
import path from 'path';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import {
|
||||
assertHeadCommit,
|
||||
prepareEnvironment,
|
||||
prepareRepo,
|
||||
runCli,
|
||||
waitForExit
|
||||
} from '../utils';
|
||||
|
||||
function getAbsolutePath(relativePath: string) {
|
||||
// Use process.cwd() which should be the project root during test execution
|
||||
return path.resolve(process.cwd(), 'test/e2e/prompt-module', relativePath);
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
const getFixturePath = (version: 9 | 18 | 19, fileName: string) =>
|
||||
path.resolve(
|
||||
process.cwd(),
|
||||
`test/e2e/prompt-module/data/commitlint_${version}/${fileName}`
|
||||
);
|
||||
|
||||
const getPromptModuleEnv = (
|
||||
mockType: 'commit-message' | 'prompt-module-commitlint-config'
|
||||
): NodeJS.ProcessEnv => ({
|
||||
OCO_TEST_MOCK_TYPE: mockType,
|
||||
OCO_PROMPT_MODULE: '@commitlint',
|
||||
OCO_AI_PROVIDER: 'test',
|
||||
OCO_GITPUSH: 'true'
|
||||
});
|
||||
|
||||
async function setupCommitlint(dir: string, version: 9 | 18 | 19) {
|
||||
cpSync(
|
||||
getFixturePath(version, 'node_modules'),
|
||||
path.join(dir, 'node_modules'),
|
||||
{
|
||||
recursive: true
|
||||
}
|
||||
);
|
||||
cpSync(
|
||||
getFixturePath(version, 'package.json'),
|
||||
path.join(dir, 'package.json')
|
||||
);
|
||||
cpSync(
|
||||
getFixturePath(version, 'commitlint.config.js'),
|
||||
path.join(dir, 'commitlint.config.js')
|
||||
);
|
||||
}
|
||||
async function setupCommitlint(dir: string, ver: 9 | 18 | 19) {
|
||||
let packagePath, packageJsonPath, configPath;
|
||||
switch (ver) {
|
||||
case 9:
|
||||
packagePath = getAbsolutePath('./data/commitlint_9/node_modules');
|
||||
packageJsonPath = getAbsolutePath('./data/commitlint_9/package.json');
|
||||
configPath = getAbsolutePath('./data/commitlint_9/commitlint.config.js');
|
||||
break;
|
||||
case 18:
|
||||
packagePath = getAbsolutePath('./data/commitlint_18/node_modules');
|
||||
packageJsonPath = getAbsolutePath('./data/commitlint_18/package.json');
|
||||
configPath = getAbsolutePath('./data/commitlint_18/commitlint.config.js');
|
||||
break;
|
||||
case 19:
|
||||
packagePath = getAbsolutePath('./data/commitlint_19/node_modules');
|
||||
packageJsonPath = getAbsolutePath('./data/commitlint_19/package.json');
|
||||
configPath = getAbsolutePath('./data/commitlint_19/commitlint.config.js');
|
||||
break;
|
||||
}
|
||||
await render('cp', ['-r', packagePath, '.'], { cwd: dir });
|
||||
await render('cp', [packageJsonPath, '.'], { cwd: dir });
|
||||
await render('cp', [configPath, '.'], { cwd: dir });
|
||||
await wait(3000); // Avoid flakiness by waiting
|
||||
|
||||
async function assertInstalledCommitlintVersion(
|
||||
cwd: string,
|
||||
version: string
|
||||
): Promise<void> {
|
||||
const { stdout = '', stderr = '' } = await execFileAsync(
|
||||
'npm',
|
||||
['list', '@commitlint/load'],
|
||||
{ cwd }
|
||||
);
|
||||
expect(`${stdout}\n${stderr}`).toContain(`@commitlint/load@${version}`);
|
||||
}
|
||||
|
||||
describe('cli flow to run "oco commitlint force"', () => {
|
||||
it('on commitlint@9 using CJS', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
await setupCommitlint(gitDir, 9);
|
||||
const npmList = await render('npm', ['list', '@commitlint/load'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
expect(await npmList.findByText('@commitlint/load@9')).toBeInTheConsole();
|
||||
try {
|
||||
await setupCommitlint(gitDir, 9);
|
||||
await assertInstalledCommitlintVersion(gitDir, '9');
|
||||
|
||||
const { findByText } = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} commitlint force \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
const oco = await runCli(['commitlint', 'force'], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('prompt-module-commitlint-config')
|
||||
});
|
||||
|
||||
expect(
|
||||
await findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
|
||||
expect(
|
||||
await findByText('Generating consistency with given @commitlint rules')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText(
|
||||
'Generating consistency with given @commitlint rules'
|
||||
)
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('on commitlint@18 using CJS', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
await setupCommitlint(gitDir, 18);
|
||||
const npmList = await render('npm', ['list', '@commitlint/load'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
expect(await npmList.findByText('@commitlint/load@18')).toBeInTheConsole();
|
||||
try {
|
||||
await setupCommitlint(gitDir, 18);
|
||||
await assertInstalledCommitlintVersion(gitDir, '18');
|
||||
|
||||
const { findByText } = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} commitlint force \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
const oco = await runCli(['commitlint', 'force'], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('prompt-module-commitlint-config')
|
||||
});
|
||||
|
||||
expect(
|
||||
await findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
|
||||
expect(
|
||||
await findByText('Generating consistency with given @commitlint rules')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText(
|
||||
'Generating consistency with given @commitlint rules'
|
||||
)
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
it('on commitlint@19 using ESM', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
await setupCommitlint(gitDir, 19);
|
||||
const npmList = await render('npm', ['list', '@commitlint/load'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
expect(await npmList.findByText('@commitlint/load@19')).toBeInTheConsole();
|
||||
try {
|
||||
await setupCommitlint(gitDir, 19);
|
||||
await assertInstalledCommitlintVersion(gitDir, '19');
|
||||
|
||||
const { findByText } = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} commitlint force \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
const oco = await runCli(['commitlint', 'force'], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('prompt-module-commitlint-config')
|
||||
});
|
||||
|
||||
expect(
|
||||
await findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
|
||||
expect(
|
||||
await findByText('Generating consistency with given @commitlint rules')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('opencommit — configure @commitlint')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Read @commitlint configuration')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText(
|
||||
'Generating consistency with given @commitlint rules'
|
||||
)
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -148,75 +160,59 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
|
||||
it('on commitlint@19 using ESM', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
// Setup commitlint@19
|
||||
await setupCommitlint(gitDir, 19);
|
||||
const npmList = await render('npm', ['list', '@commitlint/load'], {
|
||||
cwd: gitDir
|
||||
});
|
||||
expect(await npmList.findByText('@commitlint/load@19')).toBeInTheConsole();
|
||||
try {
|
||||
await setupCommitlint(gitDir, 19);
|
||||
await assertInstalledCommitlintVersion(gitDir, '19');
|
||||
|
||||
// Run `oco commitlint force`
|
||||
const commitlintForce = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} commitlint force \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(
|
||||
await commitlintForce.findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
const commitlintForce = await runCli(['commitlint', 'force'], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('prompt-module-commitlint-config')
|
||||
});
|
||||
expect(
|
||||
await commitlintForce.findByText('Done - please review contents of')
|
||||
).toBeInTheConsole();
|
||||
expect(await waitForExit(commitlintForce)).toBe(0);
|
||||
|
||||
// Run `oco commitlint get`
|
||||
const commitlintGet = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} commitlint get \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
expect(await commitlintGet.findByText('consistency')).toBeInTheConsole();
|
||||
const commitlintGet = await runCli(['commitlint', 'get'], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('prompt-module-commitlint-config')
|
||||
});
|
||||
expect(await commitlintGet.findByText('consistency')).toBeInTheConsole();
|
||||
expect(await waitForExit(commitlintGet)).toBe(0);
|
||||
|
||||
// Run 'oco' using .opencommit-commitlint
|
||||
await render('echo', [`'console.log("Hello World");' > index.ts`], {
|
||||
cwd: gitDir
|
||||
});
|
||||
await render('git', ['add index.ts'], { cwd: gitDir });
|
||||
await prepareRepo(
|
||||
gitDir,
|
||||
{
|
||||
'index.ts': 'console.log("Hello World");\n'
|
||||
},
|
||||
{ stage: true }
|
||||
);
|
||||
|
||||
const oco = await render(
|
||||
`
|
||||
OCO_TEST_MOCK_TYPE='commit-message' \
|
||||
OCO_PROMPT_MODULE='@commitlint' \
|
||||
OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
|
||||
node ${resolve('./out/cli.cjs')} \
|
||||
`,
|
||||
[],
|
||||
{ cwd: gitDir }
|
||||
);
|
||||
const oco = await runCli([], {
|
||||
cwd: gitDir,
|
||||
env: getPromptModuleEnv('commit-message')
|
||||
});
|
||||
|
||||
expect(
|
||||
await oco.findByText('Generating the commit message')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Confirm the commit message?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
expect(
|
||||
await oco.findByText('Generating the commit message')
|
||||
).toBeInTheConsole();
|
||||
expect(
|
||||
await oco.findByText('Confirm the commit message?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
expect(
|
||||
await oco.findByText('Do you want to run `git push`?')
|
||||
).toBeInTheConsole();
|
||||
oco.userEvent.keyboard('[Enter]');
|
||||
|
||||
expect(
|
||||
await oco.findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
expect(
|
||||
await oco.findByText('Successfully pushed all commits to origin')
|
||||
).toBeInTheConsole();
|
||||
expect(await waitForExit(oco)).toBe(0);
|
||||
await assertHeadCommit(gitDir, 'fix(testAi.ts): test commit message');
|
||||
} finally {
|
||||
await cleanup();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -eu
|
||||
|
||||
current_dir=$(pwd)
|
||||
setup_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
|
||||
|
||||
30
test/e2e/smoke.test.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import packageJson from '../../package.json';
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { runCli, waitForExit } from './utils';
|
||||
|
||||
it('prints help without entering the interactive flow', async () => {
|
||||
const help = await runCli(['--help'], {
|
||||
cwd: process.cwd()
|
||||
});
|
||||
|
||||
expect(await help.findByText('opencommit')).toBeInTheConsole();
|
||||
expect(await help.findByText('--context')).toBeInTheConsole();
|
||||
expect(await help.findByText('--yes')).toBeInTheConsole();
|
||||
expect(
|
||||
await help.queryByText('Select your AI provider:')
|
||||
).not.toBeInTheConsole();
|
||||
expect(await help.queryByText('Enter your API key:')).not.toBeInTheConsole();
|
||||
expect(await waitForExit(help)).toBe(0);
|
||||
});
|
||||
|
||||
it('prints the current version without booting the CLI runtime', async () => {
|
||||
const version = await runCli(['--version'], {
|
||||
cwd: process.cwd()
|
||||
});
|
||||
|
||||
expect(await version.findByText(packageJson.version)).toBeInTheConsole();
|
||||
expect(
|
||||
await version.queryByText('Generating the commit message')
|
||||
).not.toBeInTheConsole();
|
||||
expect(await waitForExit(version)).toBe(0);
|
||||
});
|
||||
@@ -1,37 +1,558 @@
|
||||
import path from 'path'
|
||||
import { mkdtemp, rm } from 'fs'
|
||||
import { promisify } from 'util';
|
||||
import path from 'path';
|
||||
import {
|
||||
appendFileSync,
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
mkdtemp,
|
||||
rm,
|
||||
writeFileSync
|
||||
} from 'fs';
|
||||
import http from 'http';
|
||||
import { tmpdir } from 'os';
|
||||
import { exec } from 'child_process';
|
||||
import { execFile } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import type { AddressInfo } from 'net';
|
||||
import { render } from 'cli-testing-library';
|
||||
import type { RenderResult } from 'cli-testing-library';
|
||||
|
||||
const fsMakeTempDir = promisify(mkdtemp);
|
||||
const fsExec = promisify(exec);
|
||||
const fsExecFile = promisify(execFile);
|
||||
const fsRemove = promisify(rm);
|
||||
|
||||
/**
|
||||
* Prepare the environment for the test
|
||||
* Create a temporary git repository in the temp directory
|
||||
*/
|
||||
export const prepareEnvironment = async (): Promise<{
|
||||
const CLI_PATH = path.resolve(process.cwd(), 'out/cli.cjs');
|
||||
const DEFAULT_TEST_ENV = {
|
||||
OCO_TEST_SKIP_VERSION_CHECK: 'true'
|
||||
};
|
||||
const COMPLETED_MIGRATIONS = [
|
||||
'00_use_single_api_key_and_url',
|
||||
'01_remove_obsolete_config_keys_from_global_file',
|
||||
'02_set_missing_default_values'
|
||||
];
|
||||
|
||||
type ProcessOptions = {
|
||||
cwd: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
};
|
||||
|
||||
type PrepareEnvironmentOptions = {
|
||||
remotes?: 0 | 1 | 2;
|
||||
};
|
||||
|
||||
export const getCliPath = () => CLI_PATH;
|
||||
|
||||
export const runProcess = async (
|
||||
command: string,
|
||||
args: string[] = [],
|
||||
{ cwd, env = {} }: ProcessOptions
|
||||
): Promise<RenderResult> => {
|
||||
return render(command, args, {
|
||||
cwd,
|
||||
spawnOpts: {
|
||||
env: {
|
||||
...process.env,
|
||||
...DEFAULT_TEST_ENV,
|
||||
...env
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
export const runCli = async (
|
||||
args: string[] = [],
|
||||
options: ProcessOptions
|
||||
): Promise<RenderResult> => {
|
||||
return runProcess(process.execPath, [getCliPath(), ...args], options);
|
||||
};
|
||||
|
||||
export const runGit = async (
|
||||
args: string[],
|
||||
cwd: string
|
||||
): Promise<{ stdout: string; stderr: string }> => {
|
||||
const { stdout = '', stderr = '' } = await fsExecFile('git', args, { cwd });
|
||||
return { stdout, stderr };
|
||||
};
|
||||
|
||||
export const configureGitUser = async (gitDir: string): Promise<void> => {
|
||||
await runGit(['config', 'user.email', 'test@example.com'], gitDir);
|
||||
await runGit(['config', 'user.name', 'Test User'], gitDir);
|
||||
};
|
||||
|
||||
export const prepareEnvironment = async ({
|
||||
remotes = 1
|
||||
}: PrepareEnvironmentOptions = {}): Promise<{
|
||||
tempDir: string;
|
||||
gitDir: string;
|
||||
remoteDir?: string;
|
||||
otherRemoteDir?: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await prepareTempDir();
|
||||
// Create a remote git repository int the temp directory. This is necessary to execute the `git push` command
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
let remoteDir: string | undefined;
|
||||
let otherRemoteDir: string | undefined;
|
||||
|
||||
if (remotes === 0) {
|
||||
await fsExecFile('git', ['init', 'test'], { cwd: tempDir });
|
||||
} else {
|
||||
await fsExecFile('git', ['init', '--bare', 'remote.git'], {
|
||||
cwd: tempDir
|
||||
});
|
||||
remoteDir = path.resolve(tempDir, 'remote.git');
|
||||
|
||||
if (remotes === 2) {
|
||||
await fsExecFile('git', ['init', '--bare', 'other.git'], {
|
||||
cwd: tempDir
|
||||
});
|
||||
otherRemoteDir = path.resolve(tempDir, 'other.git');
|
||||
}
|
||||
|
||||
await fsExecFile('git', ['clone', 'remote.git', 'test'], { cwd: tempDir });
|
||||
|
||||
if (remotes === 2) {
|
||||
await runGit(['remote', 'add', 'other', '../other.git'], gitDir);
|
||||
}
|
||||
}
|
||||
|
||||
await configureGitUser(gitDir);
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
}
|
||||
if (existsSync(tempDir)) {
|
||||
await fsRemove(tempDir, { force: true, recursive: true });
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
tempDir,
|
||||
gitDir,
|
||||
cleanup,
|
||||
remoteDir,
|
||||
otherRemoteDir,
|
||||
cleanup
|
||||
};
|
||||
};
|
||||
|
||||
export const prepareTempDir = async (): Promise<string> => {
|
||||
return fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
};
|
||||
|
||||
export const prepareRepo = async (
|
||||
gitDir: string,
|
||||
files: Record<string, string>,
|
||||
options: {
|
||||
stage?: string[] | true;
|
||||
commitMessage?: string;
|
||||
} = {}
|
||||
): Promise<void> => {
|
||||
for (const [relativePath, content] of Object.entries(files)) {
|
||||
writeRepoFile(gitDir, relativePath, content);
|
||||
}
|
||||
}
|
||||
|
||||
export const prepareTempDir = async(): Promise<string> => {
|
||||
return await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
}
|
||||
const stageFiles =
|
||||
options.stage === true
|
||||
? Object.keys(files)
|
||||
: Array.isArray(options.stage)
|
||||
? options.stage
|
||||
: options.commitMessage
|
||||
? Object.keys(files)
|
||||
: [];
|
||||
|
||||
export const wait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
if (stageFiles.length > 0) {
|
||||
await runGit(['add', ...stageFiles], gitDir);
|
||||
}
|
||||
|
||||
if (options.commitMessage) {
|
||||
await runGit(['commit', '-m', options.commitMessage], gitDir);
|
||||
}
|
||||
};
|
||||
|
||||
export const writeRepoFile = (
|
||||
gitDir: string,
|
||||
relativePath: string,
|
||||
content: string
|
||||
): void => {
|
||||
const filePath = path.resolve(gitDir, relativePath);
|
||||
mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
writeFileSync(filePath, content);
|
||||
};
|
||||
|
||||
export const appendRepoFile = (
|
||||
gitDir: string,
|
||||
relativePath: string,
|
||||
content: string
|
||||
): void => {
|
||||
const filePath = path.resolve(gitDir, relativePath);
|
||||
mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
appendFileSync(filePath, content);
|
||||
};
|
||||
|
||||
export const writeGlobalConfig = (homeDir: string, lines: string[]): string => {
|
||||
const configPath = path.resolve(homeDir, '.opencommit');
|
||||
writeFileSync(configPath, lines.join('\n'));
|
||||
return configPath;
|
||||
};
|
||||
|
||||
export const seedMigrations = (
|
||||
homeDir: string,
|
||||
completedMigrations: string[] = COMPLETED_MIGRATIONS
|
||||
): string => {
|
||||
const migrationsPath = path.resolve(homeDir, '.opencommit_migrations');
|
||||
writeFileSync(migrationsPath, JSON.stringify(completedMigrations));
|
||||
return migrationsPath;
|
||||
};
|
||||
|
||||
export const seedModelCache = async (
|
||||
homeDir: string,
|
||||
models: Record<string, string[]>
|
||||
): Promise<void> => {
|
||||
const modelCachePath = path.resolve(homeDir, '.opencommit-models.json');
|
||||
writeFileSync(
|
||||
modelCachePath,
|
||||
JSON.stringify(
|
||||
{
|
||||
timestamp: Date.now(),
|
||||
models
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
export const getMockOpenAiEnv = (
|
||||
baseUrl: string,
|
||||
overrides: NodeJS.ProcessEnv = {}
|
||||
): NodeJS.ProcessEnv => ({
|
||||
OCO_AI_PROVIDER: 'openai',
|
||||
OCO_API_KEY: 'test-openai-key',
|
||||
OCO_MODEL: 'gpt-4o-mini',
|
||||
OCO_API_URL: baseUrl,
|
||||
OCO_GITPUSH: 'false',
|
||||
...overrides
|
||||
});
|
||||
|
||||
export const getMockGeminiEnv = (
|
||||
baseUrl: string,
|
||||
overrides: NodeJS.ProcessEnv = {}
|
||||
): NodeJS.ProcessEnv => ({
|
||||
OCO_AI_PROVIDER: 'gemini',
|
||||
OCO_API_KEY: 'test-gemini-key',
|
||||
OCO_MODEL: 'gemini-1.5-flash',
|
||||
OCO_API_URL: baseUrl,
|
||||
OCO_GITPUSH: 'false',
|
||||
...overrides
|
||||
});
|
||||
|
||||
export const wait = (ms: number) =>
|
||||
new Promise((resolve) => setTimeout(resolve, ms));
|
||||
|
||||
export const waitForExit = async (
|
||||
instance: RenderResult,
|
||||
timeoutMs: number = 20_000
|
||||
): Promise<number> => {
|
||||
const startedAt = Date.now();
|
||||
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
const exit = instance.hasExit();
|
||||
if (exit) {
|
||||
return exit.exitCode;
|
||||
}
|
||||
await wait(25);
|
||||
}
|
||||
|
||||
throw new Error('Process did not exit within the expected timeout');
|
||||
};
|
||||
|
||||
export const getHeadCommitSubject = async (gitDir: string): Promise<string> => {
|
||||
const { stdout } = await runGit(['log', '-1', '--pretty=%s'], gitDir);
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
export const getHeadCommitMessage = async (gitDir: string): Promise<string> => {
|
||||
const { stdout } = await runGit(['log', '-1', '--pretty=%B'], gitDir);
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
export const getHeadCommitFiles = async (gitDir: string): Promise<string[]> => {
|
||||
const { stdout } = await runGit(
|
||||
['diff-tree', '--root', '--no-commit-id', '--name-only', '-r', 'HEAD'],
|
||||
gitDir
|
||||
);
|
||||
|
||||
return stdout
|
||||
.split('\n')
|
||||
.map((file) => file.trim())
|
||||
.filter(Boolean)
|
||||
.sort();
|
||||
};
|
||||
|
||||
export const getShortGitStatus = async (gitDir: string): Promise<string> => {
|
||||
const { stdout } = await runGit(['status', '--short'], gitDir);
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
export const getCurrentBranchName = async (gitDir: string): Promise<string> => {
|
||||
const { stdout } = await runGit(['branch', '--show-current'], gitDir);
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
export const getRemoteBranchHeadSubject = async (
|
||||
remoteGitDir: string,
|
||||
branchName: string
|
||||
): Promise<string> => {
|
||||
const { stdout = '' } = await fsExecFile(
|
||||
'git',
|
||||
[
|
||||
'--git-dir',
|
||||
remoteGitDir,
|
||||
'log',
|
||||
'-1',
|
||||
'--pretty=%s',
|
||||
`refs/heads/${branchName}`
|
||||
],
|
||||
{ cwd: process.cwd() }
|
||||
);
|
||||
|
||||
return stdout.trim();
|
||||
};
|
||||
|
||||
export const remoteBranchExists = async (
|
||||
remoteGitDir: string,
|
||||
branchName: string
|
||||
): Promise<boolean> => {
|
||||
try {
|
||||
await fsExecFile(
|
||||
'git',
|
||||
[
|
||||
'--git-dir',
|
||||
remoteGitDir,
|
||||
'rev-parse',
|
||||
'--verify',
|
||||
'--quiet',
|
||||
`refs/heads/${branchName}`
|
||||
],
|
||||
{ cwd: process.cwd() }
|
||||
);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
export const assertHeadCommit = async (
|
||||
gitDir: string,
|
||||
expectedSubject: string
|
||||
): Promise<void> => {
|
||||
expect(await getHeadCommitSubject(gitDir)).toBe(expectedSubject);
|
||||
};
|
||||
|
||||
export const assertGitStatus = async (
|
||||
gitDir: string,
|
||||
expected: string | RegExp
|
||||
): Promise<void> => {
|
||||
const status = await getShortGitStatus(gitDir);
|
||||
if (typeof expected === 'string') {
|
||||
expect(status).toContain(expected);
|
||||
return;
|
||||
}
|
||||
|
||||
expect(status).toMatch(expected);
|
||||
};
|
||||
|
||||
export const startMockOpenAiServer = async (
|
||||
response:
|
||||
| string
|
||||
| ((request: {
|
||||
authorization?: string;
|
||||
body: Record<string, any> | undefined;
|
||||
requestIndex: number;
|
||||
}) => {
|
||||
status?: number;
|
||||
body: Record<string, any>;
|
||||
headers?: Record<string, string>;
|
||||
})
|
||||
): Promise<{
|
||||
authHeaders: string[];
|
||||
requestBodies: Array<Record<string, any>>;
|
||||
baseUrl: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const authHeaders: string[] = [];
|
||||
const requestBodies: Array<Record<string, any>> = [];
|
||||
|
||||
const server = http.createServer((req, res) => {
|
||||
const authorization = req.headers.authorization;
|
||||
if (authorization) {
|
||||
authHeaders.push(
|
||||
Array.isArray(authorization) ? authorization[0] : authorization
|
||||
);
|
||||
}
|
||||
|
||||
const chunks: Buffer[] = [];
|
||||
req.on('data', (chunk) => {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
});
|
||||
req.on('end', () => {
|
||||
const rawBody = Buffer.concat(chunks).toString('utf8');
|
||||
let parsedBody: Record<string, any> | undefined;
|
||||
if (rawBody) {
|
||||
try {
|
||||
parsedBody = JSON.parse(rawBody);
|
||||
requestBodies.push(parsedBody);
|
||||
} catch {
|
||||
requestBodies.push({ rawBody });
|
||||
}
|
||||
}
|
||||
|
||||
if (req.method === 'POST' && req.url?.includes('/chat/completions')) {
|
||||
const payload =
|
||||
typeof response === 'string'
|
||||
? {
|
||||
status: 200,
|
||||
body: {
|
||||
choices: [
|
||||
{
|
||||
message: {
|
||||
content: response
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
: response({
|
||||
authorization: Array.isArray(authorization)
|
||||
? authorization[0]
|
||||
: authorization,
|
||||
body: parsedBody,
|
||||
requestIndex: requestBodies.length - 1
|
||||
});
|
||||
|
||||
res.writeHead(payload.status ?? 200, {
|
||||
'Content-Type': 'application/json',
|
||||
...payload.headers
|
||||
});
|
||||
res.end(JSON.stringify(payload.body));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'not found' }));
|
||||
});
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
server.listen(0, '127.0.0.1', () => resolve());
|
||||
});
|
||||
|
||||
const { port } = server.address() as AddressInfo;
|
||||
|
||||
return {
|
||||
authHeaders,
|
||||
requestBodies,
|
||||
baseUrl: `http://127.0.0.1:${port}/v1`,
|
||||
cleanup: () =>
|
||||
new Promise<void>((resolve, reject) => {
|
||||
server.close((error) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
})
|
||||
};
|
||||
};
|
||||
|
||||
export const startMockGeminiServer = async (
|
||||
response:
|
||||
| Record<string, any>
|
||||
| ((request: {
|
||||
apiKey?: string;
|
||||
body: Record<string, any> | undefined;
|
||||
requestIndex: number;
|
||||
}) => {
|
||||
status?: number;
|
||||
body: Record<string, any>;
|
||||
headers?: Record<string, string>;
|
||||
})
|
||||
): Promise<{
|
||||
apiKeys: string[];
|
||||
requestBodies: Array<Record<string, any>>;
|
||||
baseUrl: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const apiKeys: string[] = [];
|
||||
const requestBodies: Array<Record<string, any>> = [];
|
||||
|
||||
const server = http.createServer((req, res) => {
|
||||
const apiKeyHeader = req.headers['x-goog-api-key'];
|
||||
if (apiKeyHeader) {
|
||||
apiKeys.push(
|
||||
Array.isArray(apiKeyHeader) ? apiKeyHeader[0] : apiKeyHeader
|
||||
);
|
||||
}
|
||||
|
||||
const chunks: Buffer[] = [];
|
||||
req.on('data', (chunk) => {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
});
|
||||
req.on('end', () => {
|
||||
const rawBody = Buffer.concat(chunks).toString('utf8');
|
||||
let parsedBody: Record<string, any> | undefined;
|
||||
if (rawBody) {
|
||||
try {
|
||||
parsedBody = JSON.parse(rawBody);
|
||||
requestBodies.push(parsedBody);
|
||||
} catch {
|
||||
requestBodies.push({ rawBody });
|
||||
}
|
||||
}
|
||||
|
||||
if (req.method === 'POST' && req.url?.includes(':generateContent')) {
|
||||
const payload =
|
||||
typeof response === 'function'
|
||||
? response({
|
||||
apiKey: Array.isArray(apiKeyHeader)
|
||||
? apiKeyHeader[0]
|
||||
: apiKeyHeader,
|
||||
body: parsedBody,
|
||||
requestIndex: requestBodies.length - 1
|
||||
})
|
||||
: {
|
||||
status: 200,
|
||||
body: response
|
||||
};
|
||||
|
||||
res.writeHead(payload.status ?? 200, {
|
||||
'Content-Type': 'application/json',
|
||||
...payload.headers
|
||||
});
|
||||
res.end(JSON.stringify(payload.body));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'not found' }));
|
||||
});
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
server.listen(0, '127.0.0.1', () => resolve());
|
||||
});
|
||||
|
||||
const { port } = server.address() as AddressInfo;
|
||||
|
||||
return {
|
||||
apiKeys,
|
||||
requestBodies,
|
||||
baseUrl: `http://127.0.0.1:${port}`,
|
||||
cleanup: () =>
|
||||
new Promise<void>((resolve, reject) => {
|
||||
server.close((error) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve();
|
||||
});
|
||||
})
|
||||
};
|
||||
};
|
||||
|
||||
@@ -6,6 +6,7 @@ import { configure } from 'cli-testing-library';
|
||||
global.jest = jest;
|
||||
|
||||
/**
|
||||
* Adjusted the wait time for waitFor/findByText to 2000ms, because the default 1000ms makes the test results flaky
|
||||
* CLI rendering gets noticeably slower under coverage and on CI, so keep a
|
||||
* slightly roomier timeout than the library default.
|
||||
*/
|
||||
configure({ asyncUtilTimeout: 2000 });
|
||||
configure({ asyncUtilTimeout: 10000 });
|
||||
|
||||
@@ -122,14 +122,15 @@ describe('config', () => {
|
||||
expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
|
||||
expect(config.OCO_OMIT_SCOPE).toEqual(true);
|
||||
});
|
||||
|
||||
|
||||
it('should handle custom HTTP headers correctly', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_API_CUSTOM_HEADERS: '{"X-Global-Header": "global-value"}'
|
||||
});
|
||||
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_API_CUSTOM_HEADERS: '{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
|
||||
OCO_API_CUSTOM_HEADERS:
|
||||
'{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
@@ -138,8 +139,11 @@ describe('config', () => {
|
||||
});
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_CUSTOM_HEADERS).toEqual({"Authorization": "Bearer token123", "X-Custom-Header": "test-value"});
|
||||
|
||||
expect(config.OCO_API_CUSTOM_HEADERS).toEqual({
|
||||
Authorization: 'Bearer token123',
|
||||
'X-Custom-Header': 'test-value'
|
||||
});
|
||||
|
||||
// No need to parse JSON again since it's already an object
|
||||
const parsedHeaders = config.OCO_API_CUSTOM_HEADERS;
|
||||
expect(parsedHeaders).toHaveProperty('Authorization', 'Bearer token123');
|
||||
@@ -199,6 +203,48 @@ describe('config', () => {
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config.OCO_API_KEY).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should not create a global config file when only reading defaults', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
rmSync(globalConfigFile.filePath);
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
|
||||
expect(config.OCO_MODEL).toEqual(DEFAULT_CONFIG.OCO_MODEL);
|
||||
expect(existsSync(globalConfigFile.filePath)).toBe(false);
|
||||
});
|
||||
|
||||
it('should not materialize ambient proxy env vars into OCO_PROXY', async () => {
|
||||
process.env.HTTPS_PROXY = 'http://127.0.0.1:7890';
|
||||
|
||||
globalConfigFile = await generateConfig('.opencommit', {});
|
||||
envConfigFile = await generateConfig('.env', {});
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath,
|
||||
envPath: envConfigFile.filePath
|
||||
});
|
||||
|
||||
expect(config.OCO_PROXY).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should parse OCO_PROXY=null from local .env as explicit disable', async () => {
|
||||
globalConfigFile = await generateConfig('.opencommit', {
|
||||
OCO_PROXY: 'http://global-proxy:8080'
|
||||
});
|
||||
envConfigFile = await generateConfig('.env', {
|
||||
OCO_PROXY: 'null'
|
||||
});
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath,
|
||||
envPath: envConfigFile.filePath
|
||||
});
|
||||
|
||||
expect(config.OCO_PROXY).toEqual(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('setConfig', () => {
|
||||
@@ -325,5 +371,20 @@ describe('config', () => {
|
||||
const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
expect(fileContent2).toContain('OCO_MODEL=gpt-4');
|
||||
});
|
||||
|
||||
it('should persist OCO_PROXY=null as an explicit disable', async () => {
|
||||
await setConfig(
|
||||
[[CONFIG_KEYS.OCO_PROXY, null]],
|
||||
globalConfigFile.filePath
|
||||
);
|
||||
|
||||
const config = getConfig({
|
||||
globalPath: globalConfigFile.filePath
|
||||
});
|
||||
const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
|
||||
|
||||
expect(config.OCO_PROXY).toEqual(null);
|
||||
expect(fileContent).toContain('OCO_PROXY=null');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
29
test/unit/errors.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import {
|
||||
formatUserFriendlyError,
|
||||
ServiceUnavailableError
|
||||
} from '../../src/utils/errors';
|
||||
|
||||
describe('formatUserFriendlyError', () => {
|
||||
it('should keep provider wording when no custom API URL is configured', () => {
|
||||
const formatted = formatUserFriendlyError(
|
||||
new ServiceUnavailableError('openai'),
|
||||
'openai'
|
||||
);
|
||||
|
||||
expect(formatted.message).toEqual(
|
||||
'The openai service is temporarily unavailable.'
|
||||
);
|
||||
});
|
||||
|
||||
it('should use configured endpoint wording when a custom API URL is provided', () => {
|
||||
const formatted = formatUserFriendlyError(
|
||||
new ServiceUnavailableError('openai'),
|
||||
'openai',
|
||||
{ baseURL: 'http://127.0.0.1:1234/v1' }
|
||||
);
|
||||
|
||||
expect(formatted.message).toContain('configured API endpoint');
|
||||
expect(formatted.message).toContain('127.0.0.1:1234');
|
||||
expect(formatted.message).not.toContain('openai service');
|
||||
});
|
||||
});
|
||||
@@ -1,96 +1,133 @@
|
||||
import { FinishReason, Outcome } from '@google/generative-ai';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GeminiEngine } from '../../src/engine/gemini';
|
||||
|
||||
import { GenerativeModel, GoogleGenerativeAI } from '@google/generative-ai';
|
||||
import {
|
||||
ConfigType,
|
||||
getConfig,
|
||||
OCO_AI_PROVIDER_ENUM
|
||||
} from '../../src/commands/config';
|
||||
import { OpenAI } from 'openai';
|
||||
|
||||
describe('Gemini', () => {
|
||||
let gemini: GeminiEngine;
|
||||
let mockConfig: ConfigType;
|
||||
let mockGoogleGenerativeAi: GoogleGenerativeAI;
|
||||
let mockGenerativeModel: GenerativeModel;
|
||||
let mockExit: jest.SpyInstance<never, [code?: number | undefined], any>;
|
||||
|
||||
const noop: (...args: any[]) => any = (...args: any[]) => {};
|
||||
|
||||
const mockGemini = () => {
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
gemini = new GeminiEngine({
|
||||
apiKey: mockConfig.OCO_API_KEY,
|
||||
model: mockConfig.OCO_MODEL
|
||||
describe('GeminiEngine', () => {
|
||||
it('maps OpenAI-style chat messages into Gemini request payloads and ignores non-text parts', async () => {
|
||||
const engine = new GeminiEngine({
|
||||
apiKey: 'mock-api-key',
|
||||
model: 'gemini-1.5-flash',
|
||||
baseURL: 'http://127.0.0.1:8080/v1',
|
||||
maxTokensOutput: 256,
|
||||
maxTokensInput: 4096
|
||||
});
|
||||
};
|
||||
|
||||
const oldEnv = process.env;
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetModules();
|
||||
process.env = { ...oldEnv };
|
||||
|
||||
jest.mock('@google/generative-ai');
|
||||
jest.mock('../src/commands/config');
|
||||
|
||||
jest.mock('@clack/prompts', () => ({
|
||||
intro: jest.fn(),
|
||||
outro: jest.fn()
|
||||
}));
|
||||
|
||||
mockExit = jest.spyOn(process, 'exit').mockImplementation();
|
||||
|
||||
mockConfig = getConfig() as ConfigType;
|
||||
|
||||
mockConfig.OCO_AI_PROVIDER = OCO_AI_PROVIDER_ENUM.GEMINI;
|
||||
mockConfig.OCO_API_KEY = 'mock-api-key';
|
||||
mockConfig.OCO_MODEL = 'gemini-1.5-flash';
|
||||
|
||||
mockGoogleGenerativeAi = new GoogleGenerativeAI(mockConfig.OCO_API_KEY);
|
||||
mockGenerativeModel = mockGoogleGenerativeAi.getGenerativeModel({
|
||||
model: mockConfig.OCO_MODEL
|
||||
const generateContent = jest.fn().mockResolvedValue({
|
||||
response: {
|
||||
candidates: [
|
||||
{
|
||||
index: 0,
|
||||
content: {
|
||||
role: 'model',
|
||||
parts: [
|
||||
{
|
||||
text: 'feat(gemini): translate the diff<think>hidden</think>'
|
||||
},
|
||||
{
|
||||
executableCode: {
|
||||
language: 'python',
|
||||
code: 'print("hidden")'
|
||||
}
|
||||
},
|
||||
{
|
||||
codeExecutionResult: {
|
||||
outcome: Outcome.OUTCOME_OK,
|
||||
output: 'hidden'
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
finishReason: FinishReason.STOP
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
const getGenerativeModel = jest.fn().mockReturnValue({
|
||||
generateContent
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
gemini = undefined as any;
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
mockExit.mockRestore();
|
||||
process.env = oldEnv;
|
||||
});
|
||||
|
||||
it.skip('should exit process if OCO_GEMINI_API_KEY is not set and command is not config', () => {
|
||||
process.env.OCO_GEMINI_API_KEY = undefined;
|
||||
process.env.OCO_AI_PROVIDER = 'gemini';
|
||||
|
||||
mockGemini();
|
||||
|
||||
expect(mockExit).toHaveBeenCalledWith(1);
|
||||
});
|
||||
|
||||
it('should generate commit message', async () => {
|
||||
const mockGenerateContent = jest
|
||||
.fn()
|
||||
.mockResolvedValue({ response: { text: () => 'generated content' } });
|
||||
mockGenerativeModel.generateContent = mockGenerateContent;
|
||||
|
||||
mockGemini();
|
||||
engine.client = {
|
||||
getGenerativeModel
|
||||
} as any;
|
||||
|
||||
const messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam> =
|
||||
[
|
||||
{ role: 'system', content: 'system message' },
|
||||
{ role: 'assistant', content: 'assistant message' }
|
||||
{ role: 'assistant', content: 'assistant guidance' },
|
||||
{ role: 'user', content: 'diff --git a/file b/file' }
|
||||
];
|
||||
|
||||
jest
|
||||
.spyOn(gemini, 'generateCommitMessage')
|
||||
.mockImplementation(async () => 'generated content');
|
||||
const result = await gemini.generateCommitMessage(messages);
|
||||
const result = await engine.generateCommitMessage(messages);
|
||||
|
||||
expect(result).toEqual('generated content');
|
||||
expect(result).toEqual('feat(gemini): translate the diff');
|
||||
expect(getGenerativeModel).toHaveBeenCalledWith(
|
||||
{
|
||||
model: 'gemini-1.5-flash',
|
||||
systemInstruction: 'system message'
|
||||
},
|
||||
{
|
||||
baseUrl: 'http://127.0.0.1:8080/v1'
|
||||
}
|
||||
);
|
||||
expect(generateContent).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
contents: [
|
||||
{
|
||||
parts: [{ text: 'assistant guidance' }],
|
||||
role: 'model'
|
||||
},
|
||||
{
|
||||
parts: [{ text: 'diff --git a/file b/file' }],
|
||||
role: 'user'
|
||||
}
|
||||
],
|
||||
generationConfig: expect.objectContaining({
|
||||
maxOutputTokens: 256,
|
||||
temperature: 0,
|
||||
topP: 0.1
|
||||
})
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('fails when Gemini reports a blocked finish reason', async () => {
|
||||
const engine = new GeminiEngine({
|
||||
apiKey: 'mock-api-key',
|
||||
model: 'gemini-1.5-flash',
|
||||
baseURL: 'http://127.0.0.1:8080/v1',
|
||||
maxTokensOutput: 256,
|
||||
maxTokensInput: 4096
|
||||
});
|
||||
|
||||
const generateContent = jest.fn().mockResolvedValue({
|
||||
response: {
|
||||
candidates: [
|
||||
{
|
||||
index: 0,
|
||||
content: {
|
||||
role: 'model',
|
||||
parts: [{ text: 'feat(gemini): should not pass' }]
|
||||
},
|
||||
finishReason: FinishReason.LANGUAGE,
|
||||
finishMessage: 'Unsupported language'
|
||||
}
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
engine.client = {
|
||||
getGenerativeModel: jest.fn().mockReturnValue({
|
||||
generateContent
|
||||
})
|
||||
} as any;
|
||||
|
||||
await expect(
|
||||
engine.generateCommitMessage([
|
||||
{ role: 'system', content: 'system message' },
|
||||
{ role: 'user', content: 'diff --git a/file b/file' }
|
||||
])
|
||||
).rejects.toThrow(
|
||||
'Gemini response was blocked due to LANGUAGE: Unsupported language'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
64
test/unit/ollama.test.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { OllamaEngine } from '../../src/engine/ollama';
|
||||
|
||||
describe('OllamaEngine', () => {
|
||||
it('sends think=false when configured', async () => {
|
||||
const engine = new OllamaEngine({
|
||||
apiKey: 'ollama',
|
||||
model: 'qwen3.5:2b',
|
||||
maxTokensOutput: 500,
|
||||
maxTokensInput: 4096,
|
||||
ollamaThink: false
|
||||
});
|
||||
|
||||
const post = jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
message: {
|
||||
content: 'feat: add support for ollama think config'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
engine.client = { post } as any;
|
||||
|
||||
await engine.generateCommitMessage([
|
||||
{ role: 'user', content: 'diff --git a/file b/file' }
|
||||
]);
|
||||
|
||||
expect(post).toHaveBeenCalledWith(
|
||||
'http://localhost:11434/api/chat',
|
||||
expect.objectContaining({
|
||||
think: false
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('omits think when not configured', async () => {
|
||||
const engine = new OllamaEngine({
|
||||
apiKey: 'ollama',
|
||||
model: 'qwen3.5:2b',
|
||||
maxTokensOutput: 500,
|
||||
maxTokensInput: 4096
|
||||
});
|
||||
|
||||
const post = jest.fn().mockResolvedValue({
|
||||
data: {
|
||||
message: {
|
||||
content: 'feat: add support for ollama think config'
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
engine.client = { post } as any;
|
||||
|
||||
await engine.generateCommitMessage([
|
||||
{ role: 'user', content: 'diff --git a/file b/file' }
|
||||
]);
|
||||
|
||||
expect(post).toHaveBeenCalledWith(
|
||||
'http://localhost:11434/api/chat',
|
||||
expect.not.objectContaining({
|
||||
think: expect.anything()
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
71
test/unit/openAi.test.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { OpenAI } from 'openai';
|
||||
import { OpenAiEngine } from '../../src/engine/openAi';
|
||||
|
||||
describe('OpenAiEngine', () => {
|
||||
const baseConfig = {
|
||||
apiKey: 'test-openai-key',
|
||||
maxTokensInput: 4096,
|
||||
maxTokensOutput: 256
|
||||
};
|
||||
|
||||
const messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam> = [
|
||||
{ role: 'system', content: 'system message' },
|
||||
{ role: 'user', content: 'diff --git a/file b/file' }
|
||||
];
|
||||
|
||||
it('uses max_completion_tokens for reasoning models', async () => {
|
||||
const engine = new OpenAiEngine({
|
||||
...baseConfig,
|
||||
model: 'o3-mini'
|
||||
});
|
||||
|
||||
const create = jest
|
||||
.spyOn(engine.client.chat.completions, 'create')
|
||||
.mockResolvedValue({
|
||||
choices: [{ message: { content: 'feat(openai): reasoning path' } }]
|
||||
} as any);
|
||||
|
||||
await engine.generateCommitMessage(messages);
|
||||
|
||||
expect(create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
model: 'o3-mini',
|
||||
max_completion_tokens: 256
|
||||
})
|
||||
);
|
||||
expect(create).toHaveBeenCalledWith(
|
||||
expect.not.objectContaining({
|
||||
max_tokens: expect.anything()
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('uses max_tokens and sampling params for non-reasoning models', async () => {
|
||||
const engine = new OpenAiEngine({
|
||||
...baseConfig,
|
||||
model: 'gpt-4o-mini'
|
||||
});
|
||||
|
||||
const create = jest
|
||||
.spyOn(engine.client.chat.completions, 'create')
|
||||
.mockResolvedValue({
|
||||
choices: [{ message: { content: 'feat(openai): standard path' } }]
|
||||
} as any);
|
||||
|
||||
await engine.generateCommitMessage(messages);
|
||||
|
||||
expect(create).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
model: 'gpt-4o-mini',
|
||||
max_tokens: 256,
|
||||
temperature: 0,
|
||||
top_p: 0.1
|
||||
})
|
||||
);
|
||||
expect(create).toHaveBeenCalledWith(
|
||||
expect.not.objectContaining({
|
||||
max_completion_tokens: expect.anything()
|
||||
})
|
||||
);
|
||||
});
|
||||
});
|
||||
126
test/unit/proxy.test.ts
Normal file
@@ -0,0 +1,126 @@
|
||||
import axios from 'axios';
|
||||
import { getGlobalDispatcher } from 'undici';
|
||||
import { AnthropicEngine } from '../../src/engine/anthropic';
|
||||
import { OpenAiEngine } from '../../src/engine/openAi';
|
||||
import { resolveProxy, setupProxy } from '../../src/utils/proxy';
|
||||
|
||||
describe('proxy utilities', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
const originalAxiosProxy = axios.defaults.proxy;
|
||||
const originalAxiosHttpAgent = axios.defaults.httpAgent;
|
||||
const originalAxiosHttpsAgent = axios.defaults.httpsAgent;
|
||||
|
||||
function resetEnv(env: NodeJS.ProcessEnv) {
|
||||
Object.keys(process.env).forEach((key) => {
|
||||
if (!(key in env)) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = env[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
resetEnv(originalEnv);
|
||||
setupProxy(undefined);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetEnv(originalEnv);
|
||||
setupProxy(undefined);
|
||||
axios.defaults.proxy = originalAxiosProxy;
|
||||
axios.defaults.httpAgent = originalAxiosHttpAgent;
|
||||
axios.defaults.httpsAgent = originalAxiosHttpsAgent;
|
||||
});
|
||||
|
||||
it('should prefer an explicit proxy URL over ambient proxy env vars', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
expect(resolveProxy('http://explicit-proxy:8080')).toEqual(
|
||||
'http://explicit-proxy:8080'
|
||||
);
|
||||
});
|
||||
|
||||
it('should return null when proxy is explicitly disabled', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
expect(resolveProxy(null)).toEqual(null);
|
||||
});
|
||||
|
||||
it('should fall back to ambient proxy env vars when proxy is unset', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
expect(resolveProxy(undefined)).toEqual('http://ambient-proxy:8080');
|
||||
});
|
||||
|
||||
it('should disable proxy usage when setupProxy receives null', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
setupProxy(null);
|
||||
|
||||
expect(getGlobalDispatcher().constructor.name).toEqual('Agent');
|
||||
expect(axios.defaults.proxy).toEqual(false);
|
||||
expect(axios.defaults.httpAgent).toBeUndefined();
|
||||
expect(axios.defaults.httpsAgent).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should install proxy agents when setupProxy receives a proxy URL', () => {
|
||||
setupProxy('http://127.0.0.1:7890');
|
||||
|
||||
expect(getGlobalDispatcher().constructor.name).toEqual('ProxyAgent');
|
||||
expect(axios.defaults.proxy).toEqual(false);
|
||||
expect(axios.defaults.httpAgent).toBeDefined();
|
||||
expect(axios.defaults.httpsAgent).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('engine proxy handling', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
const baseConfig = {
|
||||
apiKey: 'test-key',
|
||||
model: 'gpt-4o-mini',
|
||||
maxTokensInput: 4096,
|
||||
maxTokensOutput: 256
|
||||
};
|
||||
|
||||
function resetEnv(env: NodeJS.ProcessEnv) {
|
||||
Object.keys(process.env).forEach((key) => {
|
||||
if (!(key in env)) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = env[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
resetEnv(originalEnv);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetEnv(originalEnv);
|
||||
});
|
||||
|
||||
it('should not let OpenAI engine re-read proxy env vars when proxy is unset', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
const engine = new OpenAiEngine({
|
||||
...baseConfig,
|
||||
proxy: undefined
|
||||
});
|
||||
|
||||
expect(engine.client.httpAgent).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should not let Anthropic engine re-read proxy env vars when proxy is unset', () => {
|
||||
process.env.HTTPS_PROXY = 'http://ambient-proxy:8080';
|
||||
|
||||
const engine = new AnthropicEngine({
|
||||
...baseConfig,
|
||||
model: 'claude-sonnet-4-20250514',
|
||||
proxy: undefined
|
||||
});
|
||||
|
||||
expect(engine.client.httpAgent).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -8,7 +8,8 @@ describe('removeContentTags', () => {
|
||||
});
|
||||
|
||||
it('should handle multiple tag occurrences', () => {
|
||||
const content = '<think>hidden</think> visible <think>also hidden</think> text';
|
||||
const content =
|
||||
'<think>hidden</think> visible <think>also hidden</think> text';
|
||||
const result = removeContentTags(content, 'think');
|
||||
expect(result).toBe('visible text');
|
||||
});
|
||||
@@ -26,7 +27,8 @@ describe('removeContentTags', () => {
|
||||
});
|
||||
|
||||
it('should work with different tag names', () => {
|
||||
const content = 'This is <custom>something to hide</custom> visible content';
|
||||
const content =
|
||||
'This is <custom>something to hide</custom> visible content';
|
||||
const result = removeContentTags(content, 'custom');
|
||||
expect(result).toBe('This is visible content');
|
||||
});
|
||||
|
||||