Compare commits
8 Commits
| Author | SHA1 | Date |
|---|---|---|
| | fdd4d89bba | |
| | d70797b864 | |
| | ebbaff0628 | |
| | 4f164a31d1 | |
| | a70a2b8a9f | |
| | 52bb719f4e | |
| | c904a78cd9 | |
| | 22077399fd | |
BIN  .github/github-mark-white.png (vendored)
Before: 4.7 KiB | After: 2.8 KiB
BIN  .github/logo-black.png (vendored)
Before: 2.1 KiB | After: 1.7 KiB
14  .github/logo-grad.svg (vendored)
@@ -1,13 +1 @@
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z" fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)"/>
<defs>
<linearGradient id="paint0_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
<stop stop-color="#D33075"/>
<stop offset="1" stop-color="#6157D8"/>
</linearGradient>
<linearGradient id="paint1_linear_498_146" x1="38.9416" y1="0" x2="38.9416" y2="75" gradientUnits="userSpaceOnUse">
<stop stop-color="#D33075"/>
<stop offset="1" stop-color="#6157D8"/>
</linearGradient>
</defs>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="url(#paint0_linear_498_146)" stroke="url(#paint1_linear_498_146)" d="M32.269 2.94345C34.6328 4.17458 36.5623 5.81371 38.0626 7.86409C37.7038 8.37105 37.3661 8.90001 37.0496 9.45094L37.0495 9.45091L37.0456 9.45797C35.2629 12.6805 34.3831 16.5345 34.3831 21V54C34.3831 58.4007 35.2636 62.2523 37.0435 65.5381L37.0433 65.5382L37.0496 65.5491C37.3661 66.1 37.7038 66.629 38.0626 67.1359C36.5622 69.1863 34.6328 70.8254 32.269 72.0565L32.2652 72.0586C29.2195 73.6786 25.5374 74.5 21.2 74.5C16.8638 74.5 13.1471 73.6791 10.0328 72.0575C6.98854 70.4377 4.62693 68.1096 2.94057 65.0635C1.31973 61.949 0.5 58.2664 0.5 54V21C0.5 16.6643 1.32072 12.9834 2.93951 9.93843C4.62596 6.89138 6.98794 4.56255 10.0329 2.94245C13.1472 1.32089 16.8639 0.5 21.2 0.5C25.5374 0.5 29.2195 1.32137 32.2652 2.94145L32.269 2.94345ZM38.6667 8.74806C38.9107 9.13077 39.1413 9.52635 39.3586 9.93481L39.3585 9.93484L39.3625 9.94203C41.047 12.9872 41.9 16.6336 41.9 20.9V54C41.9 58.266 41.0472 61.9477 39.3603 65.0619L39.3586 65.0652C39.1413 65.4736 38.9107 65.8692 38.6667 66.2519C38.4054 65.8665 38.1565 65.468 37.9199 65.0565C36.235 61.9435 35.3831 58.2635 35.3831 54V21C35.3831 16.6672 36.236 12.989 37.9187 9.94557C38.1556 9.53328 38.405 9.13412 38.6667 8.74806ZM39.2936 7.87926C40.8728 5.82164 42.8446 4.17787 45.2123 2.94436C48.3955 1.32076 52.1474 0.5 56.4831 0.5C60.8172 0.5 64.5319 1.3534 67.645 3.03964L67.6449 3.0397L67.6522 3.04345C70.7657 4.6651 73.1602 6.99537 74.8456 10.042C76.464 12.9676 77.3148 16.448 77.3792 20.5H69.3778C69.2917 16.5201 68.1674 13.3804 65.942 11.1517C63.6909 8.76341 60.5126 7.6 56.4831 7.6C52.4533 7.6 49.2164 8.72969 46.8349 11.0412L46.8348 11.0412L46.8296 11.0464C44.5081 13.3679 43.3831 16.6791 43.3831 20.9V54C43.3831 58.2218 44.5085 61.5622 46.8243 63.9482L46.8295 63.9536L46.8349 63.9588C49.2164 66.2703 52.4533 67.4 56.4831 67.4C60.5114 67.4 63.6898 66.2708 65.9421 63.9481C68.1656 61.657 69.2916 58.4862 69.3778 54.5H77.379C77.3138 58.4875 76.4638 61.9697 74.8444 64.9601C73.1588 68.0063 70.7636 70.3703 67.6486 72.0584C64.5346 73.6794 60.8185 74.5 56.4831 74.5C52.1474 74.5 48.3956 73.6793 45.2125 72.0557C42.8446 70.8222 40.8729 69.1784 39.2936 67.1207C39.6322 66.6146 39.9479 66.0865 40.2405 65.5365C42.0198 62.251 42.9 58.4 42.9 54V20.9C42.9 16.5014 42.0203 12.6824 40.2396 9.46166C39.9472 8.91234 39.6319 8.38486 39.2936 7.87926ZM11.8359 63.9427L11.8359 63.9427L11.841 63.9481C14.0918 66.2691 17.2355 67.4 21.2 67.4C25.2274 67.4 28.3768 66.2711 30.5644 63.9423C32.8103 61.5559 33.9 58.2177 33.9 54V21C33.9 16.7865 32.8123 13.4792 30.5643 11.1575C28.378 8.76316 25.2286 7.6 21.2 7.6C17.2326 7.6 14.088 8.76605 11.8384 11.1546C9.58856 13.4765 8.5 16.7848 8.5 21V54C8.5 58.2179 9.58979 61.5562 11.8359 63.9427Z"/><defs><linearGradient id="paint0_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient><linearGradient id="paint1_linear_498_146" x1="38.942" x2="38.942" y1="0" y2="75" gradientUnits="userSpaceOnUse"><stop stop-color="#D33075"/><stop offset="1" stop-color="#6157D8"/></linearGradient></defs></svg>
Before: 3.2 KiB | After: 3.2 KiB
5  .github/logo.svg (vendored)
@@ -1,4 +1 @@
<svg width="78" height="75" viewBox="0 0 78 75" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z" fill="black"/>
<path d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z" fill="black"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="78" height="75" fill="none" viewBox="0 0 78 75"><path fill="#000" d="M21.2 75C16.8 75 13 74.1667 9.8 72.5C6.66667 70.8333 4.23333 68.4333 2.5 65.3C0.833333 62.1 0 58.3333 0 54V21C0 16.6 0.833333 12.8333 2.5 9.7C4.23333 6.56666 6.66667 4.16666 9.8 2.5C13 0.833333 16.8 0 21.2 0C25.6 0 29.3667 0.833333 32.5 2.5C35.7 4.16666 38.1333 6.56666 39.8 9.7C41.5333 12.8333 42.4 16.5667 42.4 20.9V54C42.4 58.3333 41.5333 62.1 39.8 65.3C38.1333 68.4333 35.7 70.8333 32.5 72.5C29.3667 74.1667 25.6 75 21.2 75ZM21.2 66.9C25.1333 66.9 28.1333 65.8 30.2 63.6C32.3333 61.3333 33.4 58.1333 33.4 54V21C33.4 16.8667 32.3333 13.7 30.2 11.5C28.1333 9.23333 25.1333 8.1 21.2 8.1C17.3333 8.1 14.3333 9.23333 12.2 11.5C10.0667 13.7 9 16.8667 9 21V54C9 58.1333 10.0667 61.3333 12.2 63.6C14.3333 65.8 17.3333 66.9 21.2 66.9Z"/><path fill="#000" d="M56.4831 75C52.0831 75 48.2498 74.1667 44.9831 72.5C41.7831 70.8333 39.2831 68.4333 37.4831 65.3C35.7498 62.1 34.8831 58.3333 34.8831 54V21C34.8831 16.6 35.7498 12.8333 37.4831 9.7C39.2831 6.56666 41.7831 4.16666 44.9831 2.5C48.2498 0.833333 52.0831 0 56.4831 0C60.8831 0 64.6831 0.866665 67.8831 2.6C71.0831 4.26667 73.5498 6.66667 75.2831 9.8C77.0165 12.9333 77.8831 16.6667 77.8831 21H68.8831C68.8831 16.8667 67.7831 13.7 65.5831 11.5C63.4498 9.23333 60.4165 8.1 56.4831 8.1C52.5498 8.1 49.4498 9.2 47.1831 11.4C44.9831 13.6 43.8831 16.7667 43.8831 20.9V54C43.8831 58.1333 44.9831 61.3333 47.1831 63.6C49.4498 65.8 52.5498 66.9 56.4831 66.9C60.4165 66.9 63.4498 65.8 65.5831 63.6C67.7831 61.3333 68.8831 58.1333 68.8831 54H77.8831C77.8831 58.2667 77.0165 62 75.2831 65.2C73.5498 68.3333 71.0831 70.7667 67.8831 72.5C64.6831 74.1667 60.8831 75 56.4831 75Z"/></svg>
Before: 1.7 KiB | After: 1.7 KiB
BIN  .github/opencommit-example.png (vendored)
Before: 304 KiB | After: 237 KiB
16  README.md
@@ -74,6 +74,22 @@ oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'

where 192.168.1.10 is an example endpoint URL where you have Ollama set up.

#### Troubleshooting the Ollama IPv6/IPv4 Connection Issue

If you encounter issues with Ollama, such as the error:

```sh
✖ local model issues. details: connect ECONNREFUSED ::1:11434
```

this is likely because Ollama is not listening on the IPv6 loopback address (`::1`) by default. To fix this, set the `OLLAMA_HOST` environment variable to `0.0.0.0` before starting Ollama:

```bash
export OLLAMA_HOST=0.0.0.0
```

This makes Ollama listen on all interfaces (both IPv4 and IPv6), resolving the connection issue. You can add this line to your shell configuration file (such as `.bashrc` or `.zshrc`) to make it persistent across sessions.
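If you prefer not to change how Ollama itself listens, another workaround (assuming Ollama is running on its default port 11434) is to point OpenCommit at the IPv4 loopback address explicitly, so the client never resolves `localhost` to `::1`:

```sh
# Target 127.0.0.1 directly instead of localhost (default Ollama port assumed)
oco config set OCO_API_URL='http://127.0.0.1:11434/api/chat'
```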
### Flags

There are multiple optional flags that can be used with the `oco` command:
2964  out/cli.cjs
4  package-lock.json (generated)
@@ -1,12 +1,12 @@
 {
   "name": "opencommit",
-  "version": "3.2.10",
+  "version": "3.2.11",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "opencommit",
-      "version": "3.2.10",
+      "version": "3.2.11",
       "license": "MIT",
       "dependencies": {
         "@actions/core": "^1.10.0",

package.json
@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "3.2.10",
+  "version": "3.2.11",
   "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",
22  src/cli.ts
@@ -8,6 +8,12 @@ import { commitlintConfigCommand } from './commands/commitlint';
 import { configCommand } from './commands/config';
 import { hookCommand, isHookCalled } from './commands/githook.js';
 import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
+import {
+  setupCommand,
+  isFirstRun,
+  runSetup,
+  promptForMissingApiKey
+} from './commands/setup';
 import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
 import { runMigrations } from './migrations/_run.js';

@@ -17,7 +23,7 @@ cli(
   {
     version: packageJSON.version,
     name: 'opencommit',
-    commands: [configCommand, hookCommand, commitlintConfigCommand],
+    commands: [configCommand, hookCommand, commitlintConfigCommand, setupCommand],
     flags: {
       fgm: {
         type: Boolean,

@@ -47,6 +53,20 @@ cli(
     if (await isHookCalled()) {
       prepareCommitMessageHook();
     } else {
+      // Check for first run and trigger setup wizard
+      if (isFirstRun()) {
+        const setupComplete = await runSetup();
+        if (!setupComplete) {
+          process.exit(1);
+        }
+      }
+
+      // Check for missing API key and prompt if needed
+      const hasApiKey = await promptForMissingApiKey();
+      if (!hasApiKey) {
+        process.exit(1);
+      }
+
       commit(extraArgs, flags.context, false, flags.fgm, flags.yes);
     }
   },
src/commands/ENUMS.ts
@@ -1,5 +1,6 @@
 export enum COMMANDS {
   config = 'config',
   hook = 'hook',
-  commitlint = 'commitlint'
+  commitlint = 'commitlint',
+  setup = 'setup'
 }
src/commands/config.ts
@@ -68,10 +68,11 @@ export const MODEL_LIST = {
   ],

   anthropic: [
-    'claude-3-5-sonnet-20240620',
-    'claude-3-opus-20240229',
-    'claude-3-sonnet-20240229',
-    'claude-3-haiku-20240307'
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
+    'claude-3-7-sonnet-20250219',
+    'claude-3-5-sonnet-20241022',
+    'claude-3-5-haiku-20241022'
   ],

   gemini: [

@@ -846,6 +847,33 @@ export enum OCO_AI_PROVIDER_ENUM {
   OPENROUTER = 'openrouter'
 }

+export const PROVIDER_API_KEY_URLS: Record<string, string | null> = {
+  [OCO_AI_PROVIDER_ENUM.OPENAI]: 'https://platform.openai.com/api-keys',
+  [OCO_AI_PROVIDER_ENUM.ANTHROPIC]: 'https://console.anthropic.com/settings/keys',
+  [OCO_AI_PROVIDER_ENUM.GEMINI]: 'https://aistudio.google.com/app/apikey',
+  [OCO_AI_PROVIDER_ENUM.GROQ]: 'https://console.groq.com/keys',
+  [OCO_AI_PROVIDER_ENUM.MISTRAL]: 'https://console.mistral.ai/api-keys/',
+  [OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'https://platform.deepseek.com/api_keys',
+  [OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'https://openrouter.ai/keys',
+  [OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'https://aimlapi.com/app/keys',
+  [OCO_AI_PROVIDER_ENUM.AZURE]: 'https://portal.azure.com/',
+  [OCO_AI_PROVIDER_ENUM.OLLAMA]: null,
+  [OCO_AI_PROVIDER_ENUM.MLX]: null,
+  [OCO_AI_PROVIDER_ENUM.FLOWISE]: null,
+  [OCO_AI_PROVIDER_ENUM.TEST]: null
+};
+
+export const RECOMMENDED_MODELS: Record<string, string> = {
+  [OCO_AI_PROVIDER_ENUM.OPENAI]: 'gpt-4o-mini',
+  [OCO_AI_PROVIDER_ENUM.ANTHROPIC]: 'claude-sonnet-4-20250514',
+  [OCO_AI_PROVIDER_ENUM.GEMINI]: 'gemini-1.5-flash',
+  [OCO_AI_PROVIDER_ENUM.GROQ]: 'llama3-70b-8192',
+  [OCO_AI_PROVIDER_ENUM.MISTRAL]: 'mistral-small-latest',
+  [OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'deepseek-chat',
+  [OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'openai/gpt-4o-mini',
+  [OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'gpt-4o-mini'
+}
+
 export type ConfigType = {
   [CONFIG_KEYS.OCO_API_KEY]?: string;
   [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
450  src/commands/setup.ts (new file)
@@ -0,0 +1,450 @@
import { intro, outro, select, text, isCancel, spinner } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { command } from 'cleye';
|
||||
import { COMMANDS } from './ENUMS';
|
||||
import {
|
||||
CONFIG_KEYS,
|
||||
MODEL_LIST,
|
||||
OCO_AI_PROVIDER_ENUM,
|
||||
getConfig,
|
||||
setGlobalConfig,
|
||||
getGlobalConfig,
|
||||
getIsGlobalConfigFileExist,
|
||||
DEFAULT_CONFIG,
|
||||
PROVIDER_API_KEY_URLS,
|
||||
RECOMMENDED_MODELS
|
||||
} from './config';
|
||||
import {
|
||||
fetchModelsForProvider,
|
||||
fetchOllamaModels
|
||||
} from '../utils/modelCache';
|
||||
|
||||
const PROVIDER_DISPLAY_NAMES: Record<string, string> = {
|
||||
[OCO_AI_PROVIDER_ENUM.OPENAI]: 'OpenAI (GPT-4o, GPT-4)',
|
||||
[OCO_AI_PROVIDER_ENUM.ANTHROPIC]: 'Anthropic (Claude Sonnet, Opus)',
|
||||
[OCO_AI_PROVIDER_ENUM.OLLAMA]: 'Ollama (Free, runs locally)',
|
||||
[OCO_AI_PROVIDER_ENUM.GEMINI]: 'Google Gemini',
|
||||
[OCO_AI_PROVIDER_ENUM.GROQ]: 'Groq (Fast inference, free tier)',
|
||||
[OCO_AI_PROVIDER_ENUM.MISTRAL]: 'Mistral AI',
|
||||
[OCO_AI_PROVIDER_ENUM.DEEPSEEK]: 'DeepSeek',
|
||||
[OCO_AI_PROVIDER_ENUM.OPENROUTER]: 'OpenRouter (Multiple providers)',
|
||||
[OCO_AI_PROVIDER_ENUM.AIMLAPI]: 'AI/ML API',
|
||||
[OCO_AI_PROVIDER_ENUM.AZURE]: 'Azure OpenAI',
|
||||
[OCO_AI_PROVIDER_ENUM.MLX]: 'MLX (Apple Silicon, local)'
|
||||
};
|
||||
|
||||
const PRIMARY_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.OPENAI,
|
||||
OCO_AI_PROVIDER_ENUM.ANTHROPIC,
|
||||
OCO_AI_PROVIDER_ENUM.OLLAMA
|
||||
];
|
||||
|
||||
const OTHER_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.GEMINI,
|
||||
OCO_AI_PROVIDER_ENUM.GROQ,
|
||||
OCO_AI_PROVIDER_ENUM.MISTRAL,
|
||||
OCO_AI_PROVIDER_ENUM.DEEPSEEK,
|
||||
OCO_AI_PROVIDER_ENUM.OPENROUTER,
|
||||
OCO_AI_PROVIDER_ENUM.AIMLAPI,
|
||||
OCO_AI_PROVIDER_ENUM.AZURE,
|
||||
OCO_AI_PROVIDER_ENUM.MLX
|
||||
];
|
||||
|
||||
const NO_API_KEY_PROVIDERS = [
|
||||
OCO_AI_PROVIDER_ENUM.OLLAMA,
|
||||
OCO_AI_PROVIDER_ENUM.MLX
|
||||
];
|
||||
|
||||
async function selectProvider(): Promise<string | symbol> {
|
||||
const primaryOptions = PRIMARY_PROVIDERS.map((provider) => ({
|
||||
value: provider,
|
||||
label: PROVIDER_DISPLAY_NAMES[provider] || provider
|
||||
}));
|
||||
|
||||
primaryOptions.push({
|
||||
value: 'other',
|
||||
label: 'Other providers...'
|
||||
});
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select your AI provider:',
|
||||
options: primaryOptions
|
||||
});
|
||||
|
||||
if (isCancel(selection)) return selection;
|
||||
|
||||
if (selection === 'other') {
|
||||
const otherOptions = OTHER_PROVIDERS.map((provider) => ({
|
||||
value: provider,
|
||||
label: PROVIDER_DISPLAY_NAMES[provider] || provider
|
||||
}));
|
||||
|
||||
return await select({
|
||||
message: 'Select provider:',
|
||||
options: otherOptions
|
||||
});
|
||||
}
|
||||
|
||||
return selection;
|
||||
}
|
||||
|
||||
async function getApiKey(provider: string): Promise<string | symbol> {
|
||||
const url = PROVIDER_API_KEY_URLS[provider as keyof typeof PROVIDER_API_KEY_URLS];
|
||||
|
||||
let message = `Enter your ${provider} API key:`;
|
||||
if (url) {
|
||||
message = `Enter your API key:\n${chalk.dim(` Get your key at: ${url}`)}`;
|
||||
}
|
||||
|
||||
return await text({
|
||||
message,
|
||||
placeholder: 'sk-...',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'API key is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function selectModel(
|
||||
provider: string,
|
||||
apiKey?: string
|
||||
): Promise<string | symbol> {
|
||||
const loadingSpinner = spinner();
|
||||
loadingSpinner.start('Fetching available models...');
|
||||
|
||||
let models: string[] = [];
|
||||
|
||||
try {
|
||||
models = await fetchModelsForProvider(provider, apiKey);
|
||||
} catch {
|
||||
// Fall back to hardcoded list
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
models = MODEL_LIST[providerKey] || [];
|
||||
}
|
||||
|
||||
loadingSpinner.stop('Models loaded');
|
||||
|
||||
if (models.length === 0) {
|
||||
// For Ollama/MLX, prompt for manual entry
|
||||
if (NO_API_KEY_PROVIDERS.includes(provider as OCO_AI_PROVIDER_ENUM)) {
|
||||
return await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Use default from config
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
return MODEL_LIST[providerKey]?.[0] || 'gpt-4o-mini';
|
||||
}
|
||||
|
||||
// Get recommended model for this provider
|
||||
const recommended = RECOMMENDED_MODELS[provider as keyof typeof RECOMMENDED_MODELS];
|
||||
|
||||
// Build options with recommended first
|
||||
const options: Array<{ value: string; label: string }> = [];
|
||||
|
||||
if (recommended && models.includes(recommended)) {
|
||||
options.push({
|
||||
value: recommended,
|
||||
label: `${recommended} (Recommended)`
|
||||
});
|
||||
}
|
||||
|
||||
// Add other models (first 10, excluding recommended)
|
||||
const otherModels = models
|
||||
.filter((m) => m !== recommended)
|
||||
.slice(0, 10);
|
||||
|
||||
otherModels.forEach((model) => {
|
||||
options.push({ value: model, label: model });
|
||||
});
|
||||
|
||||
// Add option to see all or enter custom
|
||||
if (models.length > 11) {
|
||||
options.push({ value: '__show_all__', label: 'Show all models...' });
|
||||
}
|
||||
options.push({ value: '__custom__', label: 'Enter custom model...' });
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select a model:',
|
||||
options
|
||||
});
|
||||
|
||||
if (isCancel(selection)) return selection;
|
||||
|
||||
if (selection === '__show_all__') {
|
||||
const allOptions = models.map((model) => ({
|
||||
value: model,
|
||||
label: model === recommended ? `${model} (Recommended)` : model
|
||||
}));
|
||||
|
||||
return await select({
|
||||
message: 'Select a model:',
|
||||
options: allOptions
|
||||
});
|
||||
}
|
||||
|
||||
if (selection === '__custom__') {
|
||||
return await text({
|
||||
message: 'Enter model name:',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return selection;
|
||||
}
|
||||
|
||||
async function setupOllama(): Promise<{
|
||||
provider: string;
|
||||
model: string;
|
||||
apiUrl: string;
|
||||
} | null> {
|
||||
console.log(chalk.cyan('\n Ollama - Free Local AI\n'));
|
||||
console.log(chalk.dim(' Setup steps:'));
|
||||
console.log(chalk.dim(' 1. Install: https://ollama.ai/download'));
|
||||
console.log(chalk.dim(' 2. Pull a model: ollama pull llama3:8b'));
|
||||
console.log(chalk.dim(' 3. Start server: ollama serve\n'));
|
||||
|
||||
// Try to fetch available models
|
||||
const loadingSpinner = spinner();
|
||||
loadingSpinner.start('Checking for local Ollama installation...');
|
||||
|
||||
const defaultUrl = 'http://localhost:11434';
|
||||
let ollamaModels: string[] = [];
|
||||
|
||||
try {
|
||||
ollamaModels = await fetchOllamaModels(defaultUrl);
|
||||
if (ollamaModels.length > 0) {
|
||||
loadingSpinner.stop(
|
||||
`${chalk.green('✔')} Found ${ollamaModels.length} local model(s)`
|
||||
);
|
||||
} else {
|
||||
loadingSpinner.stop(
|
||||
chalk.yellow(
|
||||
'Ollama is running but no models found. Pull a model first: ollama pull llama3:8b'
|
||||
)
|
||||
);
|
||||
}
|
||||
} catch {
|
||||
loadingSpinner.stop(
|
||||
chalk.yellow(
|
||||
'Could not connect to Ollama. Make sure it is running: ollama serve'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Model selection
|
||||
let model: string | symbol;
|
||||
if (ollamaModels.length > 0) {
|
||||
model = await select({
|
||||
message: 'Select a model:',
|
||||
options: [
|
||||
...ollamaModels.map((m) => ({ value: m, label: m })),
|
||||
{ value: '__custom__', label: 'Enter custom model name...' }
|
||||
]
|
||||
});
|
||||
|
||||
if (isCancel(model)) return null;
|
||||
|
||||
if (model === '__custom__') {
|
||||
model = await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b'
|
||||
});
|
||||
}
|
||||
} else {
|
||||
model = await text({
|
||||
message: 'Enter model name (e.g., llama3:8b, mistral):',
|
||||
placeholder: 'llama3:8b',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (isCancel(model)) return null;
|
||||
|
||||
// API URL (optional)
|
||||
const apiUrl = await text({
|
||||
message: 'Ollama URL (press Enter for default):',
|
||||
placeholder: defaultUrl,
|
||||
defaultValue: defaultUrl
|
||||
});
|
||||
|
||||
if (isCancel(apiUrl)) return null;
|
||||
|
||||
return {
|
||||
provider: OCO_AI_PROVIDER_ENUM.OLLAMA,
|
||||
model: model as string,
|
||||
apiUrl: (apiUrl as string) || defaultUrl
|
||||
};
|
||||
}
|
||||
|
||||
export async function runSetup(): Promise<boolean> {
|
||||
intro(chalk.bgCyan(' Welcome to OpenCommit! '));
|
||||
|
||||
// Select provider
|
||||
const provider = await selectProvider();
|
||||
if (isCancel(provider)) {
|
||||
outro('Setup cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
let config: Partial<Record<string, any>> = {};
|
||||
|
||||
// Handle Ollama specially
|
||||
if (provider === OCO_AI_PROVIDER_ENUM.OLLAMA) {
|
||||
const ollamaConfig = await setupOllama();
|
||||
if (!ollamaConfig) {
|
||||
outro('Setup cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
config = {
|
||||
OCO_AI_PROVIDER: ollamaConfig.provider,
|
||||
OCO_MODEL: ollamaConfig.model,
|
||||
OCO_API_URL: ollamaConfig.apiUrl,
|
||||
OCO_API_KEY: 'ollama' // Placeholder
|
||||
};
|
||||
} else if (provider === OCO_AI_PROVIDER_ENUM.MLX) {
|
||||
// MLX setup
|
||||
console.log(chalk.cyan('\n MLX - Apple Silicon Local AI\n'));
|
||||
console.log(chalk.dim(' MLX runs locally on Apple Silicon Macs.'));
|
||||
console.log(chalk.dim(' No API key required.\n'));
|
||||
|
||||
const model = await text({
|
||||
message: 'Enter model name:',
|
||||
placeholder: 'mlx-community/Llama-3-8B-Instruct-4bit'
|
||||
});
|
||||
|
||||
if (isCancel(model)) {
|
||||
outro('Setup cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
config = {
|
||||
OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.MLX,
|
||||
OCO_MODEL: model,
|
||||
OCO_API_KEY: 'mlx' // Placeholder
|
||||
};
|
||||
} else {
|
||||
// Standard provider flow: API key then model
|
||||
const apiKey = await getApiKey(provider as string);
|
||||
if (isCancel(apiKey)) {
|
||||
outro('Setup cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
const model = await selectModel(provider as string, apiKey as string);
|
||||
if (isCancel(model)) {
|
||||
outro('Setup cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
config = {
|
||||
OCO_AI_PROVIDER: provider,
|
||||
OCO_API_KEY: apiKey,
|
||||
OCO_MODEL: model
|
||||
};
|
||||
}
|
||||
|
||||
// Save configuration
|
||||
const existingConfig = getIsGlobalConfigFileExist()
|
||||
? getGlobalConfig()
|
||||
: DEFAULT_CONFIG;
|
||||
|
||||
const newConfig = {
|
||||
...existingConfig,
|
||||
...config
|
||||
};
|
||||
|
||||
setGlobalConfig(newConfig as any);
|
||||
|
||||
outro(
|
||||
`${chalk.green('✔')} Configuration saved to ~/.opencommit\n\n Run ${chalk.cyan('oco')} to generate commit messages!`
|
||||
);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function isFirstRun(): boolean {
|
||||
if (!getIsGlobalConfigFileExist()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
// Check if API key is missing for providers that need it
|
||||
const provider = config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
|
||||
|
||||
if (NO_API_KEY_PROVIDERS.includes(provider as OCO_AI_PROVIDER_ENUM)) {
|
||||
// For Ollama/MLX, check if model is set
|
||||
return !config.OCO_MODEL;
|
||||
}
|
||||
|
||||
// For other providers, check if API key is set
|
||||
return !config.OCO_API_KEY;
|
||||
}
|
||||
|
||||
export async function promptForMissingApiKey(): Promise<boolean> {
|
||||
const config = getConfig();
|
||||
const provider = config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
|
||||
|
||||
if (NO_API_KEY_PROVIDERS.includes(provider as OCO_AI_PROVIDER_ENUM)) {
|
||||
return true; // No API key needed
|
||||
}
|
||||
|
||||
if (config.OCO_API_KEY) {
|
||||
return true; // Already has key
|
||||
}
|
||||
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
`\nAPI key missing for ${provider}. Let's set it up.\n`
|
||||
)
|
||||
);
|
||||
|
||||
const apiKey = await getApiKey(provider);
|
||||
if (isCancel(apiKey)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_API_KEY: apiKey as string
|
||||
} as any);
|
||||
|
||||
console.log(chalk.green('✔') + ' API key saved\n');
|
||||
return true;
|
||||
}
|
||||
|
||||
export const setupCommand = command(
|
||||
{
|
||||
name: COMMANDS.setup,
|
||||
help: {
|
||||
description: 'Interactive setup wizard for OpenCommit'
|
||||
}
|
||||
},
|
||||
async () => {
|
||||
await runSetup();
|
||||
}
|
||||
);
|
||||
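The wizard defined above is reachable in two ways, both visible elsewhere in this diff: it is registered as the `setup` command via `COMMANDS.setup`, and `src/cli.ts` calls `isFirstRun()` / `promptForMissingApiKey()` before the default commit flow. A brief usage sketch (commands only; the interactive prompts are driven by `@clack/prompts`):

```sh
# Run the interactive setup wizard explicitly
oco setup

# Or just run oco on an unconfigured machine; isFirstRun() launches the same wizard
oco
```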
@@ -8,6 +8,7 @@ import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { ModelNotFoundError } from '../utils/errors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -59,6 +60,20 @@ export class AnthropicEngine implements AiEngine {
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
|
||||
// Check for model not found errors
|
||||
if (err.message?.toLowerCase().includes('model') &&
|
||||
(err.message?.toLowerCase().includes('not found') ||
|
||||
err.message?.toLowerCase().includes('does not exist') ||
|
||||
err.message?.toLowerCase().includes('invalid'))) {
|
||||
throw new ModelNotFoundError(this.config.model, 'anthropic', 404);
|
||||
}
|
||||
|
||||
// Check for 404 errors
|
||||
if ('status' in (error as any) && (error as any).status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'anthropic', 404);
|
||||
}
|
||||
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
|
||||
if (
|
||||
@@ -73,6 +88,11 @@ export class AnthropicEngine implements AiEngine {
|
||||
);
|
||||
}
|
||||
|
||||
// Check axios 404 errors
|
||||
if (axios.isAxiosError(error) && error.response?.status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'anthropic', 404);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
} from '@google/generative-ai';
|
||||
import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { ModelNotFoundError } from '../utils/errors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
@@ -76,6 +77,15 @@ export class GeminiEngine implements AiEngine {
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
|
||||
// Check for model not found errors
|
||||
if (err.message?.toLowerCase().includes('model') &&
|
||||
(err.message?.toLowerCase().includes('not found') ||
|
||||
err.message?.toLowerCase().includes('does not exist') ||
|
||||
err.message?.toLowerCase().includes('invalid'))) {
|
||||
throw new ModelNotFoundError(this.config.model, 'gemini', 404);
|
||||
}
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
@@ -84,6 +94,11 @@ export class GeminiEngine implements AiEngine {
|
||||
if (geminiError) throw new Error(geminiError?.message);
|
||||
}
|
||||
|
||||
// Check axios 404 errors
|
||||
if (axios.isAxiosError(error) && error.response?.status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'gemini', 404);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { ModelNotFoundError } from '../utils/errors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
|
||||
@@ -46,6 +47,20 @@ export class OllamaEngine implements AiEngine {
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
|
||||
// Check for model not found errors
|
||||
if (message?.toLowerCase().includes('model') &&
|
||||
(message?.toLowerCase().includes('not found') ||
|
||||
message?.toLowerCase().includes('does not exist') ||
|
||||
message?.toLowerCase().includes('pull'))) {
|
||||
throw new ModelNotFoundError(this.config.model, 'ollama', 404);
|
||||
}
|
||||
|
||||
// Check for 404 status
|
||||
if (err.response?.status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'ollama', 404);
|
||||
}
|
||||
|
||||
throw new Error(`Ollama provider error: ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import axios from 'axios';
|
||||
import { OpenAI } from 'openai';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { parseCustomHeaders } from '../utils/engine';
|
||||
import { ModelNotFoundError } from '../utils/errors';
|
||||
import { removeContentTags } from '../utils/removeContentTags';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine, AiEngineConfig } from './Engine';
|
||||
@@ -62,6 +63,20 @@ export class OpenAiEngine implements AiEngine {
|
||||
return removeContentTags(content, 'think');
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
|
||||
// Check for model not found errors
|
||||
if (err.message?.toLowerCase().includes('model') &&
|
||||
(err.message?.toLowerCase().includes('not found') ||
|
||||
err.message?.toLowerCase().includes('does not exist') ||
|
||||
err.message?.toLowerCase().includes('invalid'))) {
|
||||
throw new ModelNotFoundError(this.config.model, 'openai', 404);
|
||||
}
|
||||
|
||||
// Check for 404 errors from API
|
||||
if ('status' in (error as any) && (error as any).status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'openai', 404);
|
||||
}
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
@@ -71,6 +86,11 @@ export class OpenAiEngine implements AiEngine {
|
||||
if (openAiError) throw new Error(openAiError.message);
|
||||
}
|
||||
|
||||
// Check axios 404 errors
|
||||
if (axios.isAxiosError(error) && error.response?.status === 404) {
|
||||
throw new ModelNotFoundError(this.config.model, 'openai', 404);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,7 +1,21 @@
|
||||
import { select, confirm, isCancel } from '@clack/prompts';
|
||||
import chalk from 'chalk';
|
||||
import { OpenAI } from 'openai';
|
||||
import { DEFAULT_TOKEN_LIMITS, getConfig } from './commands/config';
|
||||
import {
|
||||
DEFAULT_TOKEN_LIMITS,
|
||||
getConfig,
|
||||
setGlobalConfig,
|
||||
getGlobalConfig,
|
||||
MODEL_LIST,
|
||||
RECOMMENDED_MODELS
|
||||
} from './commands/config';
|
||||
import { getMainCommitPrompt } from './prompts';
|
||||
import { getEngine } from './utils/engine';
|
||||
import {
|
||||
isModelNotFoundError,
|
||||
getSuggestedModels,
|
||||
ModelNotFoundError
|
||||
} from './utils/errors';
|
||||
import { mergeDiffs } from './utils/mergeDiffs';
|
||||
import { tokenCount } from './utils/tokenCount';
|
||||
|
||||
@@ -36,13 +50,106 @@ export enum GenerateCommitMessageErrorEnum {
|
||||
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
|
||||
}
|
||||
|
||||
async function handleModelNotFoundError(
|
||||
error: Error,
|
||||
provider: string,
|
||||
currentModel: string
|
||||
): Promise<string | null> {
|
||||
console.log(
|
||||
chalk.red(`\n✖ Model '${currentModel}' not found\n`)
|
||||
);
|
||||
|
||||
const suggestedModels = getSuggestedModels(provider, currentModel);
|
||||
const recommended =
|
||||
RECOMMENDED_MODELS[provider as keyof typeof RECOMMENDED_MODELS];
|
||||
|
||||
if (suggestedModels.length === 0) {
|
||||
console.log(
|
||||
chalk.yellow(
|
||||
`No alternative models available. Run 'oco setup' to configure a different model.`
|
||||
)
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const options: Array<{ value: string; label: string }> = [];
|
||||
|
||||
// Add recommended first if available
|
||||
if (recommended && suggestedModels.includes(recommended)) {
|
||||
options.push({
|
||||
value: recommended,
|
||||
label: `${recommended} (Recommended)`
|
||||
});
|
||||
}
|
||||
|
||||
// Add other suggestions
|
||||
suggestedModels
|
||||
.filter((m) => m !== recommended)
|
||||
.forEach((model) => {
|
||||
options.push({ value: model, label: model });
|
||||
});
|
||||
|
||||
options.push({ value: '__custom__', label: 'Enter custom model...' });
|
||||
|
||||
const selection = await select({
|
||||
message: 'Select an alternative model:',
|
||||
options
|
||||
});
|
||||
|
||||
if (isCancel(selection)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let newModel: string;
|
||||
if (selection === '__custom__') {
|
||||
const { text } = await import('@clack/prompts');
|
||||
const customModel = await text({
|
||||
message: 'Enter model name:',
|
||||
validate: (value) => {
|
||||
if (!value || value.trim().length === 0) {
|
||||
return 'Model name is required';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
});
|
||||
|
||||
if (isCancel(customModel)) {
|
||||
return null;
|
||||
}
|
||||
newModel = customModel as string;
|
||||
} else {
|
||||
newModel = selection as string;
|
||||
}
|
||||
|
||||
// Ask if user wants to save as default
|
||||
const saveAsDefault = await confirm({
|
||||
message: 'Save as default model?'
|
||||
});
|
||||
|
||||
if (!isCancel(saveAsDefault) && saveAsDefault) {
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_MODEL: newModel
|
||||
} as any);
|
||||
console.log(chalk.green('✔') + ' Model saved as default\n');
|
||||
}
|
||||
|
||||
return newModel;
|
||||
}
|
||||
|
||||
const ADJUSTMENT_FACTOR = 20;
|
||||
|
||||
export const generateCommitMessageByDiff = async (
|
||||
diff: string,
|
||||
fullGitMojiSpec: boolean = false,
|
||||
context: string = ''
|
||||
context: string = '',
|
||||
retryWithModel?: string
|
||||
): Promise<string> => {
|
||||
const currentConfig = getConfig();
|
||||
const provider = currentConfig.OCO_AI_PROVIDER || 'openai';
|
||||
const currentModel = retryWithModel || currentConfig.OCO_MODEL;
|
||||
|
||||
try {
|
||||
const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
|
||||
fullGitMojiSpec,
|
||||
@@ -89,6 +196,32 @@ export const generateCommitMessageByDiff = async (
|
||||
|
||||
return commitMessage;
|
||||
} catch (error) {
|
||||
// Handle model-not-found errors with interactive recovery
|
||||
if (isModelNotFoundError(error)) {
|
||||
const newModel = await handleModelNotFoundError(
|
||||
error as Error,
|
||||
provider,
|
||||
currentModel
|
||||
);
|
||||
|
||||
if (newModel) {
|
||||
console.log(chalk.cyan(`Retrying with ${newModel}...\n`));
|
||||
// Retry with the new model by updating config temporarily
|
||||
const existingConfig = getGlobalConfig();
|
||||
setGlobalConfig({
|
||||
...existingConfig,
|
||||
OCO_MODEL: newModel
|
||||
} as any);
|
||||
|
||||
return generateCommitMessageByDiff(
|
||||
diff,
|
||||
fullGitMojiSpec,
|
||||
context,
|
||||
newModel
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
166  src/utils/errors.ts (new file)
@@ -0,0 +1,166 @@
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
|
||||
export class ModelNotFoundError extends Error {
|
||||
public readonly modelName: string;
|
||||
public readonly provider: string;
|
||||
public readonly statusCode: number;
|
||||
|
||||
constructor(modelName: string, provider: string, statusCode: number = 404) {
|
||||
super(`Model '${modelName}' not found for provider '${provider}'`);
|
||||
this.name = 'ModelNotFoundError';
|
||||
this.modelName = modelName;
|
||||
this.provider = provider;
|
||||
this.statusCode = statusCode;
|
||||
}
|
||||
}
|
||||
|
||||
export class ApiKeyMissingError extends Error {
|
||||
public readonly provider: string;
|
||||
|
||||
constructor(provider: string) {
|
||||
super(`API key is missing for provider '${provider}'`);
|
||||
this.name = 'ApiKeyMissingError';
|
||||
this.provider = provider;
|
||||
}
|
||||
}
|
||||
|
||||
export function isModelNotFoundError(error: unknown): boolean {
|
||||
if (error instanceof ModelNotFoundError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// OpenAI error patterns
|
||||
if (
|
||||
message.includes('model') &&
|
||||
(message.includes('not found') ||
|
||||
message.includes('does not exist') ||
|
||||
message.includes('invalid model'))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Anthropic error patterns
|
||||
if (
|
||||
message.includes('model') &&
|
||||
(message.includes('not found') || message.includes('invalid'))
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 404 status in axios/fetch errors
|
||||
if (
|
||||
'status' in (error as any) &&
|
||||
(error as any).status === 404 &&
|
||||
message.includes('model')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for response status
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 404) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function isApiKeyError(error: unknown): boolean {
|
||||
if (error instanceof ApiKeyMissingError) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
|
||||
// Common API key error patterns
|
||||
if (
|
||||
message.includes('api key') ||
|
||||
message.includes('apikey') ||
|
||||
message.includes('authentication') ||
|
||||
message.includes('unauthorized') ||
|
||||
message.includes('invalid_api_key') ||
|
||||
message.includes('incorrect api key')
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for 401 status
|
||||
if ('response' in (error as any)) {
|
||||
const response = (error as any).response;
|
||||
if (response?.status === 401) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
export function getSuggestedModels(
|
||||
provider: string,
|
||||
failedModel: string
|
||||
): string[] {
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
const models = MODEL_LIST[providerKey];
|
||||
|
||||
if (!models || !Array.isArray(models)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Return first 5 models as suggestions, excluding the failed one
|
||||
return models.filter((m) => m !== failedModel).slice(0, 5);
|
||||
}
|
||||
|
||||
export function getRecommendedModel(provider: string): string | null {
|
||||
switch (provider.toLowerCase()) {
|
||||
case OCO_AI_PROVIDER_ENUM.OPENAI:
|
||||
return 'gpt-4o-mini';
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
return 'claude-sonnet-4-20250514';
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
return 'gemini-1.5-flash';
|
||||
case OCO_AI_PROVIDER_ENUM.GROQ:
|
||||
return 'llama3-70b-8192';
|
||||
case OCO_AI_PROVIDER_ENUM.MISTRAL:
|
||||
return 'mistral-small-latest';
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
return 'deepseek-chat';
|
||||
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
|
||||
return 'openai/gpt-4o-mini';
|
||||
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
|
||||
return 'gpt-4o-mini';
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatErrorWithRecovery(
|
||||
error: Error,
|
||||
provider: string,
|
||||
model: string
|
||||
): string {
|
||||
const suggestions = getSuggestedModels(provider, model);
|
||||
const recommended = getRecommendedModel(provider);
|
||||
|
||||
let message = `\n${error.message}\n`;
|
||||
|
||||
if (suggestions.length > 0) {
|
||||
message += '\nSuggested alternatives:\n';
|
||||
suggestions.forEach((m, i) => {
|
||||
const isRecommended = m === recommended;
|
||||
message += ` ${i + 1}. ${m}${isRecommended ? ' (Recommended)' : ''}\n`;
|
||||
});
|
||||
}
|
||||
|
||||
message += '\nTo fix this, run: oco config set OCO_MODEL=<model-name>\n';
|
||||
message += 'Or run: oco setup\n';
|
||||
|
||||
return message;
|
||||
}
|
||||
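`formatErrorWithRecovery()` above ends by printing a manual fallback for situations where the interactive picker cannot run. A hedged example of that manual path, using a model name taken from the MODEL_LIST change earlier in this diff (any valid model for your provider works):

```sh
# Switch OCO_MODEL to one of the suggested alternatives, then retry
oco config set OCO_MODEL=claude-3-5-haiku-20241022
```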
170  src/utils/modelCache.ts (new file)
@@ -0,0 +1,170 @@
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
import { MODEL_LIST, OCO_AI_PROVIDER_ENUM } from '../commands/config';
|
||||
|
||||
const MODEL_CACHE_PATH = pathJoin(homedir(), '.opencommit-models.json');
|
||||
const CACHE_TTL_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
|
||||
|
||||
interface ModelCache {
|
||||
timestamp: number;
|
||||
models: Record<string, string[]>;
|
||||
}
|
||||
|
||||
function readCache(): ModelCache | null {
|
||||
try {
|
||||
if (!existsSync(MODEL_CACHE_PATH)) {
|
||||
return null;
|
||||
}
|
||||
const data = readFileSync(MODEL_CACHE_PATH, 'utf8');
|
||||
return JSON.parse(data);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function writeCache(models: Record<string, string[]>): void {
|
||||
try {
|
||||
const cache: ModelCache = {
|
||||
timestamp: Date.now(),
|
||||
models
|
||||
};
|
||||
writeFileSync(MODEL_CACHE_PATH, JSON.stringify(cache, null, 2), 'utf8');
|
||||
} catch {
|
||||
// Silently fail if we can't write cache
|
||||
}
|
||||
}
|
||||
|
||||
function isCacheValid(cache: ModelCache | null): boolean {
|
||||
if (!cache) return false;
|
||||
return Date.now() - cache.timestamp < CACHE_TTL_MS;
|
||||
}
|
||||
|
||||
export async function fetchOpenAIModels(apiKey: string): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch('https://api.openai.com/v1/models', {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`
|
||||
}
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const models = data.data
|
||||
.map((m: { id: string }) => m.id)
|
||||
.filter(
|
||||
(id: string) =>
|
||||
id.startsWith('gpt-') ||
|
||||
id.startsWith('o1') ||
|
||||
id.startsWith('o3') ||
|
||||
id.startsWith('o4')
|
||||
)
|
||||
.sort();
|
||||
|
||||
return models.length > 0 ? models : MODEL_LIST.openai;
|
||||
} catch {
|
||||
return MODEL_LIST.openai;
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchOllamaModels(
|
||||
baseUrl: string = 'http://localhost:11434'
|
||||
): Promise<string[]> {
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/api/tags`);
|
||||
|
||||
if (!response.ok) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data.models?.map((m: { name: string }) => m.name) || [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export async function fetchModelsForProvider(
|
||||
provider: string,
|
||||
apiKey?: string,
|
||||
baseUrl?: string
|
||||
): Promise<string[]> {
|
||||
const cache = readCache();
|
||||
|
||||
// Return cached models if valid
|
||||
if (isCacheValid(cache) && cache!.models[provider]) {
|
||||
return cache!.models[provider];
|
||||
}
|
||||
|
||||
let models: string[] = [];
|
||||
|
||||
switch (provider.toLowerCase()) {
|
||||
case OCO_AI_PROVIDER_ENUM.OPENAI:
|
||||
if (apiKey) {
|
||||
models = await fetchOpenAIModels(apiKey);
|
||||
} else {
|
||||
models = MODEL_LIST.openai;
|
||||
}
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.OLLAMA:
|
||||
models = await fetchOllamaModels(baseUrl);
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.ANTHROPIC:
|
||||
models = MODEL_LIST.anthropic;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GEMINI:
|
||||
models = MODEL_LIST.gemini;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.GROQ:
|
||||
models = MODEL_LIST.groq;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.MISTRAL:
|
||||
models = MODEL_LIST.mistral;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
|
||||
models = MODEL_LIST.deepseek;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.AIMLAPI:
|
||||
models = MODEL_LIST.aimlapi;
|
||||
break;
|
||||
|
||||
case OCO_AI_PROVIDER_ENUM.OPENROUTER:
|
||||
models = MODEL_LIST.openrouter;
|
||||
break;
|
||||
|
||||
default:
|
||||
models = MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
// Update cache
|
||||
const existingCache = cache?.models || {};
|
||||
existingCache[provider] = models;
|
||||
writeCache(existingCache);
|
||||
|
||||
return models;
|
||||
}
|
||||
|
||||
export function getModelsForProvider(provider: string): string[] {
|
||||
const providerKey = provider.toLowerCase() as keyof typeof MODEL_LIST;
|
||||
return MODEL_LIST[providerKey] || MODEL_LIST.openai;
|
||||
}
|
||||
|
||||
export function clearModelCache(): void {
|
||||
try {
|
||||
if (existsSync(MODEL_CACHE_PATH)) {
|
||||
writeFileSync(MODEL_CACHE_PATH, '{}', 'utf8');
|
||||
}
|
||||
} catch {
|
||||
// Silently fail
|
||||
}
|
||||
}
|
||||
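The cache above is deliberately best-effort: reads and writes swallow errors, entries expire after the 7-day TTL, and `clearModelCache()` empties the JSON file rather than deleting it. If a cached provider list goes stale before the TTL (for example after pulling new Ollama models), removing the cache file by hand is a simple way to force a refetch; the path below is the `MODEL_CACHE_PATH` defined at the top of the file:

```sh
# Force fetchModelsForProvider() to refetch on its next call
rm -f ~/.opencommit-models.json
```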