Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-01-10 06:08:16 -05:00)

Commit: Merge branch 'master' into dev
.github/workflows/codeql.yml (vendored, 8 changed lines)
@@ -40,11 +40,11 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v2
+        uses: github/codeql-action/init@v3
         with:
           languages: ${{ matrix.language }}
           # If you wish to specify custom queries, you can do so here or in a config file.
@@ -58,7 +58,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@v2
+        uses: github/codeql-action/autobuild@v3

       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -71,6 +71,6 @@ jobs:
       # ./location_of_script_within_repo/buildscript.sh

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v2
+        uses: github/codeql-action/analyze@v3
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/dependency-review.yml (vendored, 4 changed lines)
@@ -15,6 +15,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: 'Checkout Repository'
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: 'Dependency Review'
-        uses: actions/dependency-review-action@v2
+        uses: actions/dependency-review-action@v3
.github/workflows/test.yml (vendored, 35 changed lines)
@@ -1,6 +1,11 @@
 name: Testing

-on: [pull_request]
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+      - main

 jobs:
   unit-test:
@@ -9,11 +14,12 @@ jobs:
       matrix:
         node-version: [20.x]
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v2
+        uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node-version }}
+          cache: 'npm'
       - name: Install dependencies
         run: npm install
       - name: Run Unit Tests
@@ -24,11 +30,12 @@ jobs:
       matrix:
         node-version: [20.x]
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Use Node.js ${{ matrix.node-version }}
-        uses: actions/setup-node@v2
+        uses: actions/setup-node@v3
         with:
           node-version: ${{ matrix.node-version }}
+          cache: 'npm'
       - name: Install git
         run: |
           sudo apt-get update
@@ -44,3 +51,21 @@ jobs:
         run: npm run build
       - name: Run E2E Tests
         run: npm run test:e2e
+  prettier:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Use Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20.x'
+          cache: 'npm'
+      - name: Install dependencies
+        run: npm ci
+      - name: Run Prettier
+        run: npm run format:check
+      - name: Prettier Output
+        if: failure()
+        run: |
+          echo "Prettier check failed. Please run 'npm run format' to fix formatting issues."
+          exit 1
.gitignore (vendored, 3 changed lines)
@@ -11,4 +11,5 @@ uncaughtExceptions.log
 src/*.json
 .idea
 test.ts
-notes.md
+notes.md
+.nvmrc
README.md (15 changed lines)
@@ -109,11 +109,12 @@ Create a `.env` file and add OpenCommit config variables there like this:
 OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi>
 OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
 OCO_API_URL=<may be used to set proxy path to OpenAI api>
+OCO_API_CUSTOM_HEADERS=<JSON string of custom HTTP headers to include in API requests>
 OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
 OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
 OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
 OCO_EMOJI=<boolean, add GitMoji>
-OCO_MODEL=<either 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
+OCO_MODEL=<either 'gpt-4o-mini' (default), 'gpt-4o', 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo', 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview' or any Anthropic or Ollama model or any string basically, but it should be a valid model name>
 OCO_LANGUAGE=<locale, scroll to the bottom to see options>
 OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
 OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
@@ -132,6 +133,18 @@ Simply set any of the variables above like this:
 oco config set OCO_MODEL=gpt-4o-mini
 ```

+To see all available configuration parameters and their accepted values:
+
+```sh
+oco config describe
+```
+
+To see details for a specific parameter:
+
+```sh
+oco config describe OCO_MODEL
+```
+
 Configure [GitMoji](https://gitmoji.dev/) to preface a message.

 ```sh
@@ -9,19 +9,33 @@ const config: Config = {
   testTimeout: 100_000,
   coverageProvider: 'v8',
   moduleDirectories: ['node_modules', 'src'],
-  preset: 'ts-jest/presets/js-with-ts-esm',
+  preset: 'ts-jest/presets/default-esm',
   setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
   testEnvironment: 'node',
   testRegex: ['.*\\.test\\.ts$'],
-  transformIgnorePatterns: ['node_modules/(?!cli-testing-library)'],
+  // Tell Jest to ignore the specific duplicate package.json files
+  // that are causing Haste module naming collisions
+  modulePathIgnorePatterns: [
+    '<rootDir>/test/e2e/prompt-module/data/'
+  ],
+  transformIgnorePatterns: [
+    'node_modules/(?!(cli-testing-library|@clack|cleye)/.*)'
+  ],
   transform: {
-    '^.+\\.(ts|tsx)$': [
+    '^.+\\.(ts|tsx|js|jsx|mjs)$': [
       'ts-jest',
       {
         diagnostics: false,
-        useESM: true
+        useESM: true,
+        tsconfig: {
+          module: 'ESNext',
+          target: 'ES2022'
+        }
       }
     ]
   },
+  moduleNameMapper: {
+    '^(\\.{1,2}/.*)\\.js$': '$1'
+  }
 };
out/cli.cjs (18074 changed lines): file diff suppressed because one or more lines are too long.
out/github-action.cjs (27615 changed lines): file diff suppressed because one or more lines are too long.
package-lock.json (generated, 4345 changed lines): file diff suppressed because it is too large.
package.json (20 changed lines)
@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "3.2.5",
+  "version": "3.2.9",
   "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",
@@ -44,13 +44,14 @@
     "ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
     "dev": "ts-node ./src/cli.ts",
     "dev:gemini": "OCO_AI_PROVIDER='gemini' ts-node ./src/cli.ts",
-    "build": "rimraf out && node esbuild.config.js",
+    "build": "npx rimraf out && node esbuild.config.js",
     "build:push": "npm run build && git add . && git commit -m 'build' && git push",
     "deploy": "npm publish --tag latest",
     "deploy:build": "npm run build:push && git push --tags && npm run deploy",
     "deploy:patch": "npm version patch && npm run deploy:build",
     "lint": "eslint src --ext ts && tsc --noEmit",
     "format": "prettier --write src",
+    "format:check": "prettier --check src",
     "test": "node --no-warnings --experimental-vm-modules $( [ -f ./node_modules/.bin/jest ] && echo ./node_modules/.bin/jest || which jest ) test/unit",
     "test:all": "npm run test:unit:docker && npm run test:e2e:docker",
     "test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
@@ -67,14 +68,15 @@
     "@types/inquirer": "^9.0.3",
     "@types/jest": "^29.5.12",
     "@types/node": "^16.18.14",
-    "@typescript-eslint/eslint-plugin": "^5.45.0",
-    "@typescript-eslint/parser": "^5.45.0",
+    "@typescript-eslint/eslint-plugin": "^8.29.0",
+    "@typescript-eslint/parser": "^8.29.0",
     "cli-testing-library": "^2.0.2",
     "dotenv": "^16.0.3",
-    "esbuild": "^0.15.18",
-    "eslint": "^8.28.0",
+    "esbuild": "^0.25.5",
+    "eslint": "^9.24.0",
     "jest": "^29.7.0",
     "prettier": "^2.8.4",
+    "rimraf": "^6.0.1",
     "ts-jest": "^29.1.2",
     "ts-node": "^10.9.1",
     "typescript": "^4.9.3"
@@ -82,7 +84,7 @@
   "dependencies": {
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
-    "@actions/github": "^5.1.1",
+    "@actions/github": "^6.0.1",
     "@anthropic-ai/sdk": "^0.19.2",
     "@azure/openai": "^1.0.0-beta.12",
     "@clack/prompts": "^0.6.1",
@@ -102,5 +104,9 @@
     "openai": "^4.57.0",
     "punycode": "^2.3.1",
     "zod": "^3.23.8"
   },
+  "overrides": {
+    "ajv": "^8.17.1",
+    "whatwg-url": "^14.0.0"
+  }
 }
@@ -19,7 +19,11 @@ cli(
     name: 'opencommit',
     commands: [configCommand, hookCommand, commitlintConfigCommand],
     flags: {
-      fgm: Boolean,
+      fgm: {
+        type: Boolean,
+        description: 'Use full GitMoji specification',
+        default: false
+      },
       context: {
         type: String,
         alias: 'c',
@@ -1,4 +1,5 @@
 import {
+  text,
   confirm,
   intro,
   isCancel,
@@ -85,15 +86,29 @@ ${commitMessage}
 ${chalk.grey('——————————————————')}`
   );

-  const isCommitConfirmedByUser =
-    skipCommitConfirmation ||
-    (await confirm({
-      message: 'Confirm the commit message?'
-    }));
+  const userAction = skipCommitConfirmation
+    ? 'Yes'
+    : await select({
+        message: 'Confirm the commit message?',
+        options: [
+          { value: 'Yes', label: 'Yes' },
+          { value: 'No', label: 'No' },
+          { value: 'Edit', label: 'Edit' }
+        ]
+      });

-  if (isCancel(isCommitConfirmedByUser)) process.exit(1);
+  if (isCancel(userAction)) process.exit(1);

-  if (isCommitConfirmedByUser) {
+  if (userAction === 'Edit') {
+    const textResponse = await text({
+      message: 'Please edit the commit message: (press Enter to continue)',
+      initialValue: commitMessage
+    });
+
+    commitMessage = textResponse.toString();
+  }
+
+  if (userAction === 'Yes' || userAction === 'Edit') {
     const committingChangesSpinner = spinner();
     committingChangesSpinner.start('Committing the changes');
     const { stdout } = await execa('git', [
@@ -138,7 +153,8 @@ ${chalk.grey('——————————————————')}`
     ]);

     pushSpinner.stop(
-      `${chalk.green('✔')} Successfully pushed all commits to ${remotes[0]
+      `${chalk.green('✔')} Successfully pushed all commits to ${
+        remotes[0]
       }`
     );
@@ -148,23 +164,26 @@ ${chalk.grey('——————————————————')}`
       process.exit(0);
     }
   } else {
-    const skipOption = `don't push`
+    const skipOption = `don't push`;
     const selectedRemote = (await select({
       message: 'Choose a remote to push to',
-      options: [...remotes, skipOption].map((remote) => ({ value: remote, label: remote })),
+      options: [...remotes, skipOption].map((remote) => ({
+        value: remote,
+        label: remote
+      }))
     })) as string;

     if (isCancel(selectedRemote)) process.exit(1);

     if (selectedRemote !== skipOption) {
       const pushSpinner = spinner();

       pushSpinner.start(`Running 'git push ${selectedRemote}'`);

       const { stdout } = await execa('git', ['push', selectedRemote]);

       if (stdout) outro(stdout);

       pushSpinner.stop(
         `${chalk.green(
           '✔'
@@ -235,8 +254,9 @@ export async function commit(
   stagedFilesSpinner.start('Counting staged files');

-  if (!stagedFiles.length) {
+  if (stagedFiles.length === 0) {
     stagedFilesSpinner.stop('No files are staged');

     const isStageAllAndCommitConfirmedByUser = await confirm({
       message: 'Do you want to stage all files and generate commit message?'
     });
@@ -245,7 +265,7 @@ export async function commit(
     if (isStageAllAndCommitConfirmedByUser) {
       await commit(extraArgs, context, true, fullGitMojiSpec);
-      process.exit(1);
+      process.exit(0);
     }

     if (stagedFiles.length === 0 && changedFiles.length > 0) {
@@ -257,13 +277,13 @@ export async function commit(
         }))
       })) as string[];

-      if (isCancel(files)) process.exit(1);
+      if (isCancel(files)) process.exit(0);

       await gitAdd({ files });
     }

     await commit(extraArgs, context, false, fullGitMojiSpec);
-    process.exit(1);
+    process.exit(0);
   }

   stagedFilesSpinner.stop(
@@ -25,13 +25,16 @@ export enum CONFIG_KEYS {
   OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
   OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
   OCO_API_URL = 'OCO_API_URL',
+  OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
   OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
-  OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
+  OCO_GITPUSH = 'OCO_GITPUSH', // todo: deprecate
+  OCO_HOOK_AUTO_UNCOMMENT = 'OCO_HOOK_AUTO_UNCOMMENT'
 }

 export enum CONFIG_MODES {
   get = 'get',
-  set = 'set'
+  set = 'set',
+  describe = 'describe'
 }

 export const MODEL_LIST = {
@@ -234,8 +237,339 @@ export const MODEL_LIST = {
     'perplexity/sonar-pro',
+    'x-ai/grok-4-07-09',
     'x-ai/grok-3-beta',
-    'x-ai/grok-3-mini-beta',
+    'x-ai/grok-3-mini-beta'
   ],
+
+  // OpenRouter available models
+  // input_modalities: 'text'
+  // output_modalities: 'text'
+  // https://openrouter.ai/api/v1/models
+  openrouter: [
+    'openai/gpt-4o-mini', // used by default
+    '01-ai/yi-large',
+    'aetherwiing/mn-starcannon-12b',
+    'agentica-org/deepcoder-14b-preview:free',
+    'ai21/jamba-1.6-large',
+    'ai21/jamba-1.6-mini',
+    'aion-labs/aion-1.0',
+    'aion-labs/aion-1.0-mini',
+    'aion-labs/aion-rp-llama-3.1-8b',
+    'alfredpros/codellama-7b-instruct-solidity',
+    'all-hands/openhands-lm-32b-v0.1',
+    'alpindale/goliath-120b',
+    'alpindale/magnum-72b',
+    'amazon/nova-lite-v1',
+    'amazon/nova-micro-v1',
+    'amazon/nova-pro-v1',
+    'anthracite-org/magnum-v2-72b',
+    'anthracite-org/magnum-v4-72b',
+    'anthropic/claude-2',
+    'anthropic/claude-2.0',
+    'anthropic/claude-2.0:beta',
+    'anthropic/claude-2.1',
+    'anthropic/claude-2.1:beta',
+    'anthropic/claude-2:beta',
+    'anthropic/claude-3-haiku',
+    'anthropic/claude-3-haiku:beta',
+    'anthropic/claude-3-opus',
+    'anthropic/claude-3-opus:beta',
+    'anthropic/claude-3-sonnet',
+    'anthropic/claude-3-sonnet:beta',
+    'anthropic/claude-3.5-haiku',
+    'anthropic/claude-3.5-haiku-20241022',
+    'anthropic/claude-3.5-haiku-20241022:beta',
+    'anthropic/claude-3.5-haiku:beta',
+    'anthropic/claude-3.5-sonnet',
+    'anthropic/claude-3.5-sonnet-20240620',
+    'anthropic/claude-3.5-sonnet-20240620:beta',
+    'anthropic/claude-3.5-sonnet:beta',
+    'anthropic/claude-3.7-sonnet',
+    'anthropic/claude-3.7-sonnet:beta',
+    'anthropic/claude-3.7-sonnet:thinking',
+    'anthropic/claude-opus-4',
+    'anthropic/claude-sonnet-4',
+    'arcee-ai/arcee-blitz',
+    'arcee-ai/caller-large',
+    'arcee-ai/coder-large',
+    'arcee-ai/maestro-reasoning',
+    'arcee-ai/spotlight',
+    'arcee-ai/virtuoso-large',
+    'arcee-ai/virtuoso-medium-v2',
+    'arliai/qwq-32b-arliai-rpr-v1:free',
+    'cognitivecomputations/dolphin-mixtral-8x22b',
+    'cognitivecomputations/dolphin3.0-mistral-24b:free',
+    'cognitivecomputations/dolphin3.0-r1-mistral-24b:free',
+    'cohere/command',
+    'cohere/command-a',
+    'cohere/command-r',
+    'cohere/command-r-03-2024',
+    'cohere/command-r-08-2024',
+    'cohere/command-r-plus',
+    'cohere/command-r-plus-04-2024',
+    'cohere/command-r-plus-08-2024',
+    'cohere/command-r7b-12-2024',
+    'deepseek/deepseek-chat',
+    'deepseek/deepseek-chat-v3-0324',
+    'deepseek/deepseek-chat-v3-0324:free',
+    'deepseek/deepseek-chat:free',
+    'deepseek/deepseek-prover-v2',
+    'deepseek/deepseek-prover-v2:free',
+    'deepseek/deepseek-r1',
+    'deepseek/deepseek-r1-0528',
+    'deepseek/deepseek-r1-0528-qwen3-8b',
+    'deepseek/deepseek-r1-0528-qwen3-8b:free',
+    'deepseek/deepseek-r1-0528:free',
+    'deepseek/deepseek-r1-distill-llama-70b',
+    'deepseek/deepseek-r1-distill-llama-70b:free',
+    'deepseek/deepseek-r1-distill-llama-8b',
+    'deepseek/deepseek-r1-distill-qwen-1.5b',
+    'deepseek/deepseek-r1-distill-qwen-14b',
+    'deepseek/deepseek-r1-distill-qwen-14b:free',
+    'deepseek/deepseek-r1-distill-qwen-32b',
+    'deepseek/deepseek-r1-distill-qwen-32b:free',
+    'deepseek/deepseek-r1-distill-qwen-7b',
+    'deepseek/deepseek-r1-zero:free',
+    'deepseek/deepseek-r1:free',
+    'deepseek/deepseek-v3-base:free',
+    'eleutherai/llemma_7b',
+    'eva-unit-01/eva-llama-3.33-70b',
+    'eva-unit-01/eva-qwen-2.5-32b',
+    'eva-unit-01/eva-qwen-2.5-72b',
+    'featherless/qwerky-72b:free',
+    'google/gemini-2.0-flash-001',
+    'google/gemini-2.0-flash-exp:free',
+    'google/gemini-2.0-flash-lite-001',
+    'google/gemini-2.5-flash-preview',
+    'google/gemini-2.5-flash-preview-05-20',
+    'google/gemini-2.5-flash-preview-05-20:thinking',
+    'google/gemini-2.5-flash-preview:thinking',
+    'google/gemini-2.5-pro-exp-03-25',
+    'google/gemini-2.5-pro-preview',
+    'google/gemini-2.5-pro-preview-05-06',
+    'google/gemini-flash-1.5',
+    'google/gemini-flash-1.5-8b',
+    'google/gemini-pro-1.5',
+    'google/gemma-2-27b-it',
+    'google/gemma-2-9b-it',
+    'google/gemma-2-9b-it:free',
+    'google/gemma-3-12b-it',
+    'google/gemma-3-12b-it:free',
+    'google/gemma-3-1b-it:free',
+    'google/gemma-3-27b-it',
+    'google/gemma-3-27b-it:free',
+    'google/gemma-3-4b-it',
+    'google/gemma-3-4b-it:free',
+    'google/gemma-3n-e4b-it:free',
+    'gryphe/mythomax-l2-13b',
+    'inception/mercury-coder-small-beta',
+    'infermatic/mn-inferor-12b',
+    'inflection/inflection-3-pi',
+    'inflection/inflection-3-productivity',
+    'liquid/lfm-3b',
+    'liquid/lfm-40b',
+    'liquid/lfm-7b',
+    'mancer/weaver',
+    'meta-llama/llama-2-70b-chat',
+    'meta-llama/llama-3-70b-instruct',
+    'meta-llama/llama-3-8b-instruct',
+    'meta-llama/llama-3.1-405b',
+    'meta-llama/llama-3.1-405b-instruct',
+    'meta-llama/llama-3.1-405b:free',
+    'meta-llama/llama-3.1-70b-instruct',
+    'meta-llama/llama-3.1-8b-instruct',
+    'meta-llama/llama-3.1-8b-instruct:free',
+    'meta-llama/llama-3.2-11b-vision-instruct',
+    'meta-llama/llama-3.2-11b-vision-instruct:free',
+    'meta-llama/llama-3.2-1b-instruct',
+    'meta-llama/llama-3.2-1b-instruct:free',
+    'meta-llama/llama-3.2-3b-instruct',
+    'meta-llama/llama-3.2-3b-instruct:free',
+    'meta-llama/llama-3.2-90b-vision-instruct',
+    'meta-llama/llama-3.3-70b-instruct',
+    'meta-llama/llama-3.3-70b-instruct:free',
+    'meta-llama/llama-3.3-8b-instruct:free',
+    'meta-llama/llama-4-maverick',
+    'meta-llama/llama-4-maverick:free',
+    'meta-llama/llama-4-scout',
+    'meta-llama/llama-4-scout:free',
+    'meta-llama/llama-guard-2-8b',
+    'meta-llama/llama-guard-3-8b',
+    'meta-llama/llama-guard-4-12b',
+    'microsoft/mai-ds-r1:free',
+    'microsoft/phi-3-medium-128k-instruct',
+    'microsoft/phi-3-mini-128k-instruct',
+    'microsoft/phi-3.5-mini-128k-instruct',
+    'microsoft/phi-4',
+    'microsoft/phi-4-multimodal-instruct',
+    'microsoft/phi-4-reasoning-plus',
+    'microsoft/phi-4-reasoning-plus:free',
+    'microsoft/phi-4-reasoning:free',
+    'microsoft/wizardlm-2-8x22b',
+    'minimax/minimax-01',
+    'mistralai/codestral-2501',
+    'mistralai/devstral-small',
+    'mistralai/devstral-small:free',
+    'mistralai/magistral-medium-2506',
+    'mistralai/magistral-medium-2506:thinking',
+    'mistralai/magistral-small-2506',
+    'mistralai/ministral-3b',
+    'mistralai/ministral-8b',
+    'mistralai/mistral-7b-instruct',
+    'mistralai/mistral-7b-instruct-v0.1',
+    'mistralai/mistral-7b-instruct-v0.2',
+    'mistralai/mistral-7b-instruct-v0.3',
+    'mistralai/mistral-7b-instruct:free',
+    'mistralai/mistral-large',
+    'mistralai/mistral-large-2407',
+    'mistralai/mistral-large-2411',
+    'mistralai/mistral-medium',
+    'mistralai/mistral-medium-3',
+    'mistralai/mistral-nemo',
+    'mistralai/mistral-nemo:free',
+    'mistralai/mistral-saba',
+    'mistralai/mistral-small',
+    'mistralai/mistral-small-24b-instruct-2501',
+    'mistralai/mistral-small-24b-instruct-2501:free',
+    'mistralai/mistral-small-3.1-24b-instruct',
+    'mistralai/mistral-small-3.1-24b-instruct:free',
+    'mistralai/mistral-tiny',
+    'mistralai/mixtral-8x22b-instruct',
+    'mistralai/mixtral-8x7b-instruct',
+    'mistralai/pixtral-12b',
+    'mistralai/pixtral-large-2411',
+    'moonshotai/kimi-vl-a3b-thinking:free',
+    'moonshotai/moonlight-16b-a3b-instruct:free',
+    'neversleep/llama-3-lumimaid-70b',
+    'neversleep/llama-3-lumimaid-8b',
+    'neversleep/llama-3.1-lumimaid-70b',
+    'neversleep/llama-3.1-lumimaid-8b',
+    'neversleep/noromaid-20b',
+    'nothingiisreal/mn-celeste-12b',
+    'nousresearch/deephermes-3-llama-3-8b-preview:free',
+    'nousresearch/deephermes-3-mistral-24b-preview:free',
+    'nousresearch/hermes-2-pro-llama-3-8b',
+    'nousresearch/hermes-3-llama-3.1-405b',
+    'nousresearch/hermes-3-llama-3.1-70b',
+    'nousresearch/nous-hermes-2-mixtral-8x7b-dpo',
+    'nvidia/llama-3.1-nemotron-70b-instruct',
+    'nvidia/llama-3.1-nemotron-ultra-253b-v1',
+    'nvidia/llama-3.1-nemotron-ultra-253b-v1:free',
+    'nvidia/llama-3.3-nemotron-super-49b-v1',
+    'nvidia/llama-3.3-nemotron-super-49b-v1:free',
+    'open-r1/olympiccoder-32b:free',
+    'openai/chatgpt-4o-latest',
+    'openai/codex-mini',
+    'openai/gpt-3.5-turbo',
+    'openai/gpt-3.5-turbo-0125',
+    'openai/gpt-3.5-turbo-0613',
+    'openai/gpt-3.5-turbo-1106',
+    'openai/gpt-3.5-turbo-16k',
+    'openai/gpt-3.5-turbo-instruct',
+    'openai/gpt-4',
+    'openai/gpt-4-0314',
+    'openai/gpt-4-1106-preview',
+    'openai/gpt-4-turbo',
+    'openai/gpt-4-turbo-preview',
+    'openai/gpt-4.1',
+    'openai/gpt-4.1-mini',
+    'openai/gpt-4.1-nano',
+    'openai/gpt-4.5-preview',
+    'openai/gpt-4o',
+    'openai/gpt-4o-2024-05-13',
+    'openai/gpt-4o-2024-08-06',
+    'openai/gpt-4o-2024-11-20',
+    'openai/gpt-4o-mini-2024-07-18',
+    'openai/gpt-4o-mini-search-preview',
+    'openai/gpt-4o-search-preview',
+    'openai/gpt-4o:extended',
+    'openai/o1',
+    'openai/o1-mini',
+    'openai/o1-mini-2024-09-12',
+    'openai/o1-preview',
+    'openai/o1-preview-2024-09-12',
+    'openai/o1-pro',
+    'openai/o3',
+    'openai/o3-mini',
+    'openai/o3-mini-high',
+    'openai/o3-pro',
+    'openai/o4-mini',
+    'openai/o4-mini-high',
+    'opengvlab/internvl3-14b:free',
+    'opengvlab/internvl3-2b:free',
+    'openrouter/auto',
+    'perplexity/llama-3.1-sonar-large-128k-online',
+    'perplexity/llama-3.1-sonar-small-128k-online',
+    'perplexity/r1-1776',
+    'perplexity/sonar',
+    'perplexity/sonar-deep-research',
+    'perplexity/sonar-pro',
+    'perplexity/sonar-reasoning',
+    'perplexity/sonar-reasoning-pro',
+    'pygmalionai/mythalion-13b',
+    'qwen/qwen-2-72b-instruct',
+    'qwen/qwen-2.5-72b-instruct',
+    'qwen/qwen-2.5-72b-instruct:free',
+    'qwen/qwen-2.5-7b-instruct',
+    'qwen/qwen-2.5-7b-instruct:free',
+    'qwen/qwen-2.5-coder-32b-instruct',
+    'qwen/qwen-2.5-coder-32b-instruct:free',
+    'qwen/qwen-2.5-vl-7b-instruct',
+    'qwen/qwen-2.5-vl-7b-instruct:free',
+    'qwen/qwen-max',
+    'qwen/qwen-plus',
+    'qwen/qwen-turbo',
+    'qwen/qwen-vl-max',
+    'qwen/qwen-vl-plus',
+    'qwen/qwen2.5-vl-32b-instruct',
+    'qwen/qwen2.5-vl-32b-instruct:free',
+    'qwen/qwen2.5-vl-3b-instruct:free',
+    'qwen/qwen2.5-vl-72b-instruct',
+    'qwen/qwen2.5-vl-72b-instruct:free',
+    'qwen/qwen3-14b',
+    'qwen/qwen3-14b:free',
+    'qwen/qwen3-235b-a22b',
+    'qwen/qwen3-235b-a22b:free',
+    'qwen/qwen3-30b-a3b',
+    'qwen/qwen3-30b-a3b:free',
+    'qwen/qwen3-32b',
+    'qwen/qwen3-32b:free',
+    'qwen/qwen3-8b',
+    'qwen/qwen3-8b:free',
+    'qwen/qwq-32b',
+    'qwen/qwq-32b-preview',
+    'qwen/qwq-32b:free',
+    'raifle/sorcererlm-8x22b',
+    'rekaai/reka-flash-3:free',
+    'sao10k/fimbulvetr-11b-v2',
+    'sao10k/l3-euryale-70b',
+    'sao10k/l3-lunaris-8b',
+    'sao10k/l3.1-euryale-70b',
+    'sao10k/l3.3-euryale-70b',
+    'sarvamai/sarvam-m:free',
+    'scb10x/llama3.1-typhoon2-70b-instruct',
+    'sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b',
+    'shisa-ai/shisa-v2-llama3.3-70b:free',
+    'sophosympatheia/midnight-rose-70b',
+    'thedrummer/anubis-pro-105b-v1',
+    'thedrummer/rocinante-12b',
+    'thedrummer/skyfall-36b-v2',
+    'thedrummer/unslopnemo-12b',
+    'thedrummer/valkyrie-49b-v1',
+    'thudm/glm-4-32b',
+    'thudm/glm-4-32b:free',
+    'thudm/glm-z1-32b',
+    'thudm/glm-z1-32b:free',
+    'thudm/glm-z1-rumination-32b',
+    'tngtech/deepseek-r1t-chimera:free',
+    'undi95/remm-slerp-l2-13b',
+    'undi95/toppy-m-7b',
+    'x-ai/grok-2-1212',
+    'x-ai/grok-2-vision-1212',
+    'x-ai/grok-3-beta',
+    'x-ai/grok-3-mini-beta',
+    'x-ai/grok-beta',
+    'x-ai/grok-vision-beta'
+  ]
 };

 const getDefaultModel = (provider: string | undefined): string => {
@@ -256,14 +590,16 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.deepseek[0];
     case 'aimlapi':
       return MODEL_LIST.aimlapi[0];
+    case 'openrouter':
+      return MODEL_LIST.openrouter[0];
     default:
       return MODEL_LIST.openai[0];
   }
 };

 export enum DEFAULT_TOKEN_LIMITS {
-  DEFAULT_MAX_TOKENS_INPUT = 40960,
-  DEFAULT_MAX_TOKENS_OUTPUT = 4096
+  DEFAULT_MAX_TOKENS_INPUT = 4096,
+  DEFAULT_MAX_TOKENS_OUTPUT = 500
 }

 const validateConfig = (
@@ -311,6 +647,22 @@ export const configValidators = {
     return value;
   },

+  [CONFIG_KEYS.OCO_API_CUSTOM_HEADERS](value) {
+    try {
+      // Custom headers must be a valid JSON string
+      if (typeof value === 'string') {
+        JSON.parse(value);
+      }
+      return value;
+    } catch (error) {
+      validateConfig(
+        CONFIG_KEYS.OCO_API_CUSTOM_HEADERS,
+        false,
+        'Must be a valid JSON string of headers'
+      );
+    }
+  },
+
   [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
     value = parseInt(value);
     validateConfig(
@@ -431,6 +783,7 @@ export const configValidators = {
         'groq',
         'deepseek',
         'aimlapi',
+        'openrouter'
       ].includes(value) || value.startsWith('ollama'),
       `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
     );
@@ -466,6 +819,14 @@ export const configValidators = {
       'Must be true or false'
     );
     return value;
   },
+
+  [CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT](value: any) {
+    validateConfig(
+      CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT,
+      typeof value === 'boolean',
+      'Must be true or false'
+    );
+  }
 };
@@ -482,6 +843,7 @@ export enum OCO_AI_PROVIDER_ENUM {
   MLX = 'mlx',
   DEEPSEEK = 'deepseek',
   AIMLAPI = 'aimlapi',
+  OPENROUTER = 'openrouter'
 }

 export type ConfigType = {
@@ -489,6 +851,7 @@ export type ConfigType = {
   [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
   [CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
   [CONFIG_KEYS.OCO_API_URL]?: string;
+  [CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
   [CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
   [CONFIG_KEYS.OCO_EMOJI]: boolean;
   [CONFIG_KEYS.OCO_WHY]: boolean;
@@ -501,6 +864,7 @@ export type ConfigType = {
   [CONFIG_KEYS.OCO_ONE_LINE_COMMIT]: boolean;
   [CONFIG_KEYS.OCO_OMIT_SCOPE]: boolean;
   [CONFIG_KEYS.OCO_TEST_MOCK_TYPE]: string;
+  [CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT]: boolean;
 };

 export const defaultConfigPath = pathJoin(homedir(), '.opencommit');
@@ -548,7 +912,8 @@ export const DEFAULT_CONFIG = {
   OCO_TEST_MOCK_TYPE: 'commit-message',
   OCO_WHY: false,
   OCO_OMIT_SCOPE: false,
-  OCO_GITPUSH: true // todo: deprecate
+  OCO_GITPUSH: true, // todo: deprecate
+  OCO_HOOK_AUTO_UNCOMMENT: false
 };

 const initGlobalConfig = (configPath: string = defaultConfigPath) => {
@@ -571,6 +936,7 @@ const getEnvConfig = (envPath: string) => {
     OCO_MODEL: process.env.OCO_MODEL,
     OCO_API_URL: process.env.OCO_API_URL,
     OCO_API_KEY: process.env.OCO_API_KEY,
+    OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
     OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,

     OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
@@ -712,28 +1078,220 @@ export const setConfig = (
   outro(`${chalk.green('✔')} config successfully set`);
 };

+// --- HELP MESSAGE GENERATION ---
+function getConfigKeyDetails(key) {
+  switch (key) {
+    case CONFIG_KEYS.OCO_MODEL:
+      return {
+        description: 'The AI model to use for generating commit messages',
+        values: MODEL_LIST
+      };
+    case CONFIG_KEYS.OCO_AI_PROVIDER:
+      return {
+        description: 'The AI provider to use',
+        values: Object.values(OCO_AI_PROVIDER_ENUM)
+      };
+    case CONFIG_KEYS.OCO_PROMPT_MODULE:
+      return {
+        description: 'The prompt module to use for commit message generation',
+        values: Object.values(OCO_PROMPT_MODULE_ENUM)
+      };
+    case CONFIG_KEYS.OCO_LANGUAGE:
+      return {
+        description: 'The locale to use for commit messages',
+        values: Object.keys(i18n)
+      };
+    case CONFIG_KEYS.OCO_TEST_MOCK_TYPE:
+      return {
+        description: 'The type of test mock to use',
+        values: ['commit-message', 'prompt-module-commitlint-config']
+      };
+    case CONFIG_KEYS.OCO_ONE_LINE_COMMIT:
+      return {
+        description: 'One line commit message',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_DESCRIPTION:
+      return {
+        description:
+          'Postface a message with ~3 sentences description of the changes',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_EMOJI:
+      return {
+        description: 'Preface a message with GitMoji',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_WHY:
+      return {
+        description:
+          'Output a short description of why the changes were done after the commit message (default: false)',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_OMIT_SCOPE:
+      return {
+        description: 'Do not include a scope in the commit message',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_GITPUSH:
+      return {
+        description:
+          'Push to git after commit (deprecated). If false, oco will exit after committing',
+        values: ['true', 'false']
+      };
+    case CONFIG_KEYS.OCO_TOKENS_MAX_INPUT:
+      return {
+        description: 'Max model token limit',
+        values: ['Any positive integer']
+      };
+    case CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT:
+      return {
+        description: 'Max response tokens',
+        values: ['Any positive integer']
+      };
+    case CONFIG_KEYS.OCO_API_KEY:
+      return {
+        description: 'API key for the selected provider',
+        values: ['String (required for most providers)']
+      };
+    case CONFIG_KEYS.OCO_API_URL:
+      return {
+        description:
+          'Custom API URL - may be used to set proxy path to OpenAI API',
+        values: ["URL string (must start with 'http://' or 'https://')"]
+      };
+    case CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
+      return {
+        description: 'Message template placeholder',
+        values: ['String (must start with $)']
+      };
+    case CONFIG_KEYS.OCO_HOOK_AUTO_UNCOMMENT:
+      return {
+        description: 'Automatically uncomment the commit message in the hook',
+        values: ['true', 'false']
+      };
+    default:
+      return {
+        description: 'String value',
+        values: ['Any string']
+      };
+  }
+}
+
+function printConfigKeyHelp(param) {
+  if (!Object.values(CONFIG_KEYS).includes(param)) {
+    console.log(chalk.red(`Unknown config parameter: ${param}`));
+    return;
+  }
+
+  const details = getConfigKeyDetails(param as CONFIG_KEYS);
+
+  let desc = details.description;
+  let defaultValue = undefined;
+  if (param in DEFAULT_CONFIG) {
+    defaultValue = DEFAULT_CONFIG[param];
+  }
+
+  console.log(chalk.bold(`\n${param}:`));
+  console.log(chalk.gray(`  Description: ${desc}`));
+  if (defaultValue !== undefined) {
+    // Print booleans and numbers as-is, strings without quotes
+    if (typeof defaultValue === 'string') {
+      console.log(chalk.gray(`  Default: ${defaultValue}`));
+    } else {
+      console.log(chalk.gray(`  Default: ${defaultValue}`));
+    }
+  }
+
+  if (Array.isArray(details.values)) {
+    console.log(chalk.gray('  Accepted values:'));
+    details.values.forEach((value) => {
+      console.log(chalk.gray(`    - ${value}`));
+    });
+  } else {
+    console.log(chalk.gray('  Accepted values by provider:'));
+    Object.entries(details.values).forEach(([provider, values]) => {
+      console.log(chalk.gray(`    ${provider}:`));
+      (values as string[]).forEach((value) => {
+        console.log(chalk.gray(`      - ${value}`));
+      });
+    });
+  }
+}
+
+function printAllConfigHelp() {
+  console.log(chalk.bold('Available config parameters:'));
+  for (const key of Object.values(CONFIG_KEYS).sort()) {
+    const details = getConfigKeyDetails(key);
+    // Try to get the default value from DEFAULT_CONFIG
+    let defaultValue = undefined;
+    if (key in DEFAULT_CONFIG) {
+      defaultValue = DEFAULT_CONFIG[key];
+    }
+
+    console.log(chalk.bold(`\n${key}:`));
+    console.log(chalk.gray(`  Description: ${details.description}`));
+    if (defaultValue !== undefined) {
+      if (typeof defaultValue === 'string') {
+        console.log(chalk.gray(`  Default: ${defaultValue}`));
+      } else {
+        console.log(chalk.gray(`  Default: ${defaultValue}`));
+      }
+    }
+  }
+  console.log(
+    chalk.yellow(
+      '\nUse "oco config describe [PARAMETER]" to see accepted values and more details for a specific config parameter.'
+    )
+  );
+}
+
 export const configCommand = command(
   {
     name: COMMANDS.config,
-    parameters: ['<mode>', '<key=values...>']
+    parameters: ['<mode>', '[key=values...]'],
+    help: {
+      description: 'Configure opencommit settings',
+      examples: [
+        'Describe all config parameters: oco config describe',
+        'Describe a specific parameter: oco config describe OCO_MODEL',
+        'Get a config value: oco config get OCO_MODEL',
+        'Set a config value: oco config set OCO_MODEL=gpt-4'
+      ]
+    }
   },
   async (argv) => {
     try {
       const { mode, keyValues } = argv._;
       intro(`COMMAND: config ${mode} ${keyValues}`);

-      if (mode === CONFIG_MODES.get) {
+      if (mode === CONFIG_MODES.describe) {
+        if (!keyValues || keyValues.length === 0) {
+          printAllConfigHelp();
+        } else {
+          for (const key of keyValues) {
+            printConfigKeyHelp(key);
+          }
+        }
+        process.exit(0);
+      } else if (mode === CONFIG_MODES.get) {
+        if (!keyValues || keyValues.length === 0) {
+          throw new Error('No config keys specified for get mode');
+        }
         const config = getConfig() || {};
         for (const key of keyValues) {
           outro(`${key}=${config[key as keyof typeof config]}`);
         }
       } else if (mode === CONFIG_MODES.set) {
+        if (!keyValues || keyValues.length === 0) {
+          throw new Error('No config keys specified for set mode');
+        }
         await setConfig(
           keyValues.map((keyValue) => keyValue.split('=') as [string, string])
         );
       } else {
         throw new Error(
-          `Unsupported mode: ${mode}. Valid modes are: "set" and "get"`
+          `Unsupported mode: ${mode}. Valid modes are: "set", "get", and "describe"`
         );
       }
     } catch (error) {
@@ -56,10 +56,14 @@ export const prepareCommitMessageHook = async (
     const fileContent = await fs.readFile(messageFilePath);

-    await fs.writeFile(
-      messageFilePath,
-      commitMessage + '\n' + fileContent.toString()
-    );
+    const messageWithComment = `# ${commitMessage}\n\n# ---------- [OpenCommit] ---------- #\n# Remove the # above to use this generated commit message.\n# To cancel the commit, just close this window without making any changes.\n\n${fileContent.toString()}`;
+    const messageWithoutComment = `${commitMessage}\n\n${fileContent.toString()}`;
+
+    const message = config.OCO_HOOK_AUTO_UNCOMMENT
+      ? messageWithoutComment
+      : messageWithComment;
+
+    await fs.writeFile(messageFilePath, message);
   } catch (error) {
     outro(`${chalk.red('✖')} ${error}`);
     process.exit(1);
@@ -11,6 +11,7 @@ export interface AiEngineConfig {
   maxTokensOutput: number;
   maxTokensInput: number;
   baseURL?: string;
+  customHeaders?: Record<string, string>;
 }

 type Client =
@@ -8,6 +8,7 @@ import axios from 'axios';
 import chalk from 'chalk';
 import { OpenAI } from 'openai';
 import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { removeContentTags } from '../utils/removeContentTags';
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
@@ -54,8 +55,8 @@ export class AnthropicEngine implements AiEngine {
       const data = await this.client.messages.create(params);

       const message = data?.content[0].text;
-
-      return message;
+      let content = message;
+      return removeContentTags(content, 'think');
     } catch (error) {
       const err = error as Error;
       outro(`${chalk.red('✖')} ${err?.message || err}`);
@@ -7,6 +7,7 @@ import axios from 'axios';
 import chalk from 'chalk';
 import { OpenAI } from 'openai';
 import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { removeContentTags } from '../utils/removeContentTags';
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
@@ -52,7 +53,9 @@ export class AzureEngine implements AiEngine {
       if (message?.content === null) {
         return undefined;
       }
-      return message?.content;
+
+      let content = message?.content;
+      return removeContentTags(content, 'think');
     } catch (error) {
       outro(`${chalk.red('✖')} ${this.config.model}`);
@@ -1,8 +1,9 @@
 import axios from 'axios';
 import { OpenAI } from 'openai';
 import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { removeContentTags } from '../utils/removeContentTags';
 import { tokenCount } from '../utils/tokenCount';
-import { OpenAiEngine, OpenAiConfig } from './openAI';
+import { OpenAiEngine, OpenAiConfig } from './openAi';

 export interface DeepseekConfig extends OpenAiConfig {}
@@ -41,8 +42,8 @@ export class DeepseekEngine extends OpenAiEngine {
       const completion = await this.client.chat.completions.create(params);

       const message = completion.choices[0].message;
-
-      return message?.content;
+      let content = message?.content;
+      return removeContentTags(content, 'think');
     } catch (error) {
       const err = error as Error;
       if (
@@ -1,5 +1,6 @@
 import axios, { AxiosInstance } from 'axios';
 import { OpenAI } from 'openai';
+import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';

 interface FlowiseAiConfig extends AiEngineConfig {}
@@ -36,7 +37,8 @@ export class FlowiseEngine implements AiEngine {
     try {
       const response = await this.client.post('', payload);
       const message = response.data;
-      return message?.text;
+      let content = message?.text;
+      return removeContentTags(content, 'think');
     } catch (err: any) {
       const message = err.response?.data?.error ?? err.message;
       throw new Error('local model issues. details: ' + message);
@@ -7,6 +7,7 @@ import {
 } from '@google/generative-ai';
 import axios from 'axios';
 import { OpenAI } from 'openai';
+import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';

 interface GeminiConfig extends AiEngineConfig {}
@@ -71,7 +72,8 @@ export class GeminiEngine implements AiEngine {
         }
       });

-      return result.response.text();
+      const content = result.response.text();
+      return removeContentTags(content, 'think');
     } catch (error) {
       const err = error as Error;
       if (
@@ -7,4 +7,4 @@ export class GroqEngine extends OpenAiEngine {
     config.baseURL = 'https://api.groq.com/openai/v1';
     super(config);
   }
-}
+}
@@ -1,27 +1,21 @@
 import axios from 'axios';
-import { Mistral } from '@mistralai/mistralai';
 import { OpenAI } from 'openai';
 import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { removeContentTags } from '../utils/removeContentTags';
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
-import {
-  AssistantMessage as MistralAssistantMessage,
-  SystemMessage as MistralSystemMessage,
-  ToolMessage as MistralToolMessage,
-  UserMessage as MistralUserMessage
-} from '@mistralai/mistralai/models/components';

+// Using any for Mistral types to avoid type declaration issues
 export interface MistralAiConfig extends AiEngineConfig {}
-export type MistralCompletionMessageParam = Array<
-  | (MistralSystemMessage & { role: "system" })
-  | (MistralUserMessage & { role: "user" })
-  | (MistralAssistantMessage & { role: "assistant" })
-  | (MistralToolMessage & { role: "tool" })
->
+export type MistralCompletionMessageParam = Array<any>;
+
+// Import Mistral dynamically to avoid TS errors
+// eslint-disable-next-line @typescript-eslint/no-var-requires
+const Mistral = require('@mistralai/mistralai').Mistral;

 export class MistralAiEngine implements AiEngine {
   config: MistralAiConfig;
-  client: Mistral;
+  client: any; // Using any type for Mistral client to avoid TS errors

   constructor(config: MistralAiConfig) {
     this.config = config;
@@ -29,7 +23,10 @@ export class MistralAiEngine implements AiEngine {
     if (!config.baseURL) {
       this.client = new Mistral({ apiKey: config.apiKey });
     } else {
-      this.client = new Mistral({ apiKey: config.apiKey, serverURL: config.baseURL });
+      this.client = new Mistral({
+        apiKey: config.apiKey,
+        serverURL: config.baseURL
+      });
     }
   }
@@ -56,15 +53,15 @@ export class MistralAiEngine implements AiEngine {
       const completion = await this.client.chat.complete(params);

-      if (!completion.choices)
-        throw Error('No completion choice available.')
-
+      if (!completion.choices) throw Error('No completion choice available.');

       const message = completion.choices[0].message;

       if (!message || !message.content)
-        throw Error('No completion choice available.')
+        throw Error('No completion choice available.');

-      return message.content as string;
+      let content = message.content as string;
+      return removeContentTags(content, 'think');
     } catch (error) {
       const err = error as Error;
       if (
@@ -1,47 +1,47 @@
 import axios, { AxiosInstance } from 'axios';
 import { OpenAI } from 'openai';
+import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';
 import { chown } from 'fs';

 interface MLXConfig extends AiEngineConfig {}

 export class MLXEngine implements AiEngine {
   config: MLXConfig;
   client: AxiosInstance;

   constructor(config) {
     this.config = config;
     this.client = axios.create({
       url: config.baseURL
         ? `${config.baseURL}/${config.apiKey}`
         : 'http://localhost:8080/v1/chat/completions',
       headers: { 'Content-Type': 'application/json' }
     });
   }

-  async generateCommitMessage(
-    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>):
-    Promise<string | undefined> {
-    const params = {
-      messages,
-      temperature: 0,
-      top_p: 0.1,
-      repetition_penalty: 1.5,
-      stream: false
-    };
-    try {
-      const response = await this.client.post(
-        this.client.getUri(this.config),
-        params
-      );
-
-      const choices = response.data.choices;
-      const message = choices[0].message;
-
-      return message?.content;
-    } catch (err: any) {
-      const message = err.response?.data?.error ?? err.message;
-      throw new Error(`MLX provider error: ${message}`);
-    }
-  }
+  async generateCommitMessage(
+    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
+  ): Promise<string | undefined> {
+    const params = {
+      messages,
+      temperature: 0,
+      top_p: 0.1,
+      repetition_penalty: 1.5,
+      stream: false
+    };
+    try {
+      const response = await this.client.post(
+        this.client.getUri(this.config),
+        params
+      );
+
+      const choices = response.data.choices;
+      const message = choices[0].message;
+      let content = message?.content;
+      return removeContentTags(content, 'think');
+    } catch (err: any) {
+      const message = err.response?.data?.error ?? err.message;
+      throw new Error(`MLX provider error: ${message}`);
+    }
+  }
 }
@@ -1,5 +1,6 @@
 import axios, { AxiosInstance } from 'axios';
 import { OpenAI } from 'openai';
+import { removeContentTags } from '../utils/removeContentTags';
 import { AiEngine, AiEngineConfig } from './Engine';

 interface OllamaConfig extends AiEngineConfig {}
@@ -10,11 +11,18 @@ export class OllamaEngine implements AiEngine {
   constructor(config) {
     this.config = config;
+
+    // Combine base headers with custom headers
+    const headers = {
+      'Content-Type': 'application/json',
+      ...config.customHeaders
+    };
+
     this.client = axios.create({
       url: config.baseURL
         ? `${config.baseURL}/${config.apiKey}`
         : 'http://localhost:11434/api/chat',
-      headers: { 'Content-Type': 'application/json' }
+      headers
     });
   }
@@ -35,12 +43,7 @@ export class OllamaEngine implements AiEngine {
       const { message } = response.data;
       let content = message?.content;

-      if (content && content.includes('<think>')) {
-        return content.replace(/<think>[\s\S]*?<\/think>/g, '').trim();
-      }
-
-      return content;
+      return removeContentTags(content, 'think');
     } catch (err: any) {
       const message = err.response?.data?.error ?? err.message;
       throw new Error(`Ollama provider error: ${message}`);
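Every engine in this commit now routes its completion through `removeContentTags(content, 'think')`, but the helper itself is not part of the diff. Below is a minimal sketch of what it plausibly looks like, inferred from the inline `<think>`-stripping regex it replaces in the Ollama engine above; the file path and exact signature are assumptions.

```ts
// Hypothetical sketch of src/utils/removeContentTags.ts (not shown in this diff).
// It strips <tag>...</tag> blocks, e.g. the "think" output of reasoning models,
// so only the final commit message text is returned.
export function removeContentTags(
  content: string | null | undefined,
  tag: string
): string | null | undefined {
  if (!content) return content;

  // Remove every <tag>...</tag> block, including multi-line ones, then trim.
  const tagRegex = new RegExp(`<${tag}>[\\s\\S]*?<\\/${tag}>`, 'g');
  return content.replace(tagRegex, '').trim();
}
```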
@@ -1,6 +1,8 @@
 import axios from 'axios';
 import { OpenAI } from 'openai';
 import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
+import { parseCustomHeaders } from '../utils/engine';
+import { removeContentTags } from '../utils/removeContentTags';
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
@@ -13,11 +15,22 @@ export class OpenAiEngine implements AiEngine {
   constructor(config: OpenAiConfig) {
     this.config = config;

-    if (!config.baseURL) {
-      this.client = new OpenAI({ apiKey: config.apiKey });
-    } else {
-      this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
-    }
+    const clientOptions: OpenAI.ClientOptions = {
+      apiKey: config.apiKey
+    };
+
+    if (config.baseURL) {
+      clientOptions.baseURL = config.baseURL;
+    }
+
+    if (config.customHeaders) {
+      const headers = parseCustomHeaders(config.customHeaders);
+      if (Object.keys(headers).length > 0) {
+        clientOptions.defaultHeaders = headers;
+      }
+    }
+
+    this.client = new OpenAI(clientOptions);
   }

   public generateCommitMessage = async (
@@ -45,8 +58,8 @@ export class OpenAiEngine implements AiEngine {
       const completion = await this.client.chat.completions.create(params);

       const message = completion.choices[0].message;
-
-      return message?.content;
+      let content = message?.content;
+      return removeContentTags(content, 'think');
     } catch (error) {
       const err = error as Error;
       if (
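`parseCustomHeaders` is imported from `../utils/engine`, but its implementation is outside this diff. Since the `OCO_API_CUSTOM_HEADERS` validator only checks that the value is a valid JSON string, a plausible sketch is shown here; the exact fallback behaviour is an assumption.

```ts
// Hypothetical sketch of parseCustomHeaders (src/utils/engine.ts is not shown
// in this diff). It converts the OCO_API_CUSTOM_HEADERS value, a JSON string or
// an already-parsed object, into a plain header record, ignoring invalid input.
export function parseCustomHeaders(
  customHeaders: string | Record<string, string>
): Record<string, string> {
  if (typeof customHeaders !== 'string') return customHeaders ?? {};

  try {
    const parsed = JSON.parse(customHeaders);
    return parsed && typeof parsed === 'object' ? parsed : {};
  } catch {
    // The config validator is expected to reject invalid JSON before this point.
    return {};
  }
}
```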
src/engine/openrouter.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
+import OpenAI from 'openai';
+import { AiEngine, AiEngineConfig } from './Engine';
+import axios, { AxiosInstance } from 'axios';
+import { removeContentTags } from '../utils/removeContentTags';
+
+interface OpenRouterConfig extends AiEngineConfig {}
+
+export class OpenRouterEngine implements AiEngine {
+  client: AxiosInstance;
+
+  constructor(public config: OpenRouterConfig) {
+    this.client = axios.create({
+      baseURL: 'https://openrouter.ai/api/v1/chat/completions',
+      headers: {
+        Authorization: `Bearer ${config.apiKey}`,
+        'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
+        'X-Title': 'OpenCommit',
+        'Content-Type': 'application/json'
+      }
+    });
+  }
+
+  public generateCommitMessage = async (
+    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
+  ): Promise<string | null> => {
+    try {
+      const response = await this.client.post('', {
+        model: this.config.model,
+        messages
+      });
+
+      const message = response.data.choices[0].message;
+      let content = message?.content;
+      return removeContentTags(content, 'think');
+    } catch (error) {
+      const err = error as Error;
+      if (
+        axios.isAxiosError<{ error?: { message: string } }>(error) &&
+        error.response?.status === 401
+      ) {
+        const openRouterError = error.response.data.error;
+
+        if (openRouterError) throw new Error(openRouterError.message);
+      }
+
+      throw err;
+    }
+  };
+}
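A hedged usage sketch of the new engine follows; the API key and model values are placeholders, the token limits simply mirror the defaults set elsewhere in this commit, and in the CLI the engine is normally created by the provider factory rather than constructed by hand.

```ts
// Illustrative only: the config values below are placeholders, and the token
// limits mirror the defaults set in this commit (4096 input / 500 output).
import { OpenRouterEngine } from './openrouter';

async function demo(): Promise<void> {
  const engine = new OpenRouterEngine({
    apiKey: process.env.OCO_API_KEY ?? '',
    model: 'openai/gpt-4o-mini',
    maxTokensInput: 4096,
    maxTokensOutput: 500
  });

  const commitMessage = await engine.generateCommitMessage([
    { role: 'user', content: 'Generate a commit message for this diff: ...' }
  ]);

  console.log(commitMessage);
}
```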
@@ -14,7 +14,10 @@ const generateCommitMessageChatCompletionPrompt = async (
   fullGitMojiSpec: boolean,
   context: string
 ): Promise<Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>> => {
-  const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(fullGitMojiSpec, context);
+  const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
+    fullGitMojiSpec,
+    context
+  );

   const chatContextAsCompletionRequest = [...INIT_MESSAGES_PROMPT];
@@ -38,7 +41,7 @@ const ADJUSTMENT_FACTOR = 20;
 export const generateCommitMessageByDiff = async (
   diff: string,
   fullGitMojiSpec: boolean = false,
-  context: string = ""
+  context: string = ''
 ): Promise<string> => {
   try {
     const INIT_MESSAGES_PROMPT = await getMainCommitPrompt(
@@ -75,7 +78,7 @@ export const generateCommitMessageByDiff = async (
     const messages = await generateCommitMessageChatCompletionPrompt(
       diff,
       fullGitMojiSpec,
-      context,
+      context
     );

     const engine = getEngine();
@@ -1,8 +1,8 @@
 {
   "localLanguage": "한국어",
   "commitFix": "fix(server.ts): 포트 변수를 소문자 port에서 대문자 PORT로 변경",
-  "commitFeat": "피트(server.ts): process.env.PORT 환경 변수 지원 추가",
+  "commitFeat": "feat(server.ts): process.env.PORT 환경 변수 지원 추가",
   "commitDescription": "포트 변수는 이제 PORT로 이름이 지정되어 상수인 PORT와 일관성 있는 이름 규칙을 따릅니다. 환경 변수 지원을 통해 애플리케이션은 이제 process.env.PORT 환경 변수로 지정된 사용 가능한 모든 포트에서 실행할 수 있으므로 더 유연해졌습니다.",
   "commitFixOmitScope": "fix: 포트 변수를 소문자 port에서 대문자 PORT로 변경",
-  "commitFeatOmitScope": "피트: process.env.PORT 환경 변수 지원 추가"
+  "commitFeatOmitScope": "feat: process.env.PORT 환경 변수 지원 추가"
 }
@@ -36,6 +36,19 @@ export const runMigrations = async () => {
   const config = getConfig();
   if (config.OCO_AI_PROVIDER === OCO_AI_PROVIDER_ENUM.TEST) return;

+  // skip unhandled providers in migration00
+  if (
+    [
+      OCO_AI_PROVIDER_ENUM.DEEPSEEK,
+      OCO_AI_PROVIDER_ENUM.GROQ,
+      OCO_AI_PROVIDER_ENUM.MISTRAL,
+      OCO_AI_PROVIDER_ENUM.MLX,
+      OCO_AI_PROVIDER_ENUM.OPENROUTER
+    ].includes(config.OCO_AI_PROVIDER)
+  ) {
+    return;
+  }
+
   const completedMigrations = getCompletedMigrations();

   let isMigrated = false;
@@ -53,7 +53,7 @@ export const configureCommitlintIntegration = async (force = false) => {
|
||||
|
||||
spin.start('Generating consistency with given @commitlint rules');
|
||||
|
||||
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig);
|
||||
const prompts = inferPromptsFromCommitlintConfig(commitLintConfig as any);
|
||||
|
||||
const consistencyPrompts =
|
||||
commitlintPrompts.GEN_COMMITLINT_CONSISTENCY_PROMPT(prompts);
|
||||
|
||||
@@ -58,16 +58,16 @@ const llmReadableRules: {
|
||||
caseRule: (key, applicable, value: string | Array<string>) =>
|
||||
`The ${key} should ${applicable} be in ${
|
||||
Array.isArray(value)
|
||||
? `one of the following case:
|
||||
? `one of the following case:
|
||||
- ${value.join('\n - ')}.`
|
||||
: `${value} case.`
|
||||
}`,
|
||||
emptyRule: (key, applicable) => `The ${key} should ${applicable} be empty.`,
|
||||
enumRule: (key, applicable, value: string | Array<string>) =>
|
||||
`The ${key} should ${applicable} be one of the following values:
|
||||
`The ${key} should ${applicable} be one of the following values:
|
||||
- ${Array.isArray(value) ? value.join('\n - ') : value}.`,
|
||||
enumTypeRule: (key, applicable, value: string | Array<string>, prompt) =>
|
||||
`The ${key} should ${applicable} be one of the following values:
|
||||
`The ${key} should ${applicable} be one of the following values:
|
||||
- ${
|
||||
Array.isArray(value)
|
||||
? value
|
||||
@@ -224,8 +224,12 @@ Here are the specific requirements and conventions that should be strictly follo

Commit Message Conventions:
- The commit message consists of three parts: Header, Body, and Footer.
- Header:
  - Format: ${config.OCO_OMIT_SCOPE ? '`<type>: <subject>`' : '`<type>(<scope>): <subject>`'}
- Header:
  - Format: ${
      config.OCO_OMIT_SCOPE
        ? '`<type>: <subject>`'
        : '`<type>(<scope>): <subject>`'
    }
- ${prompts.join('\n- ')}

JSON Output Format:
@@ -240,7 +244,7 @@ JSON Output Format:
  "commitDescription": "<Description of commit for both the bug fix and the feature>"
}
\`\`\`
- The "commitDescription" should not include the commit message’s header, only the description.
- The "commitDescription" should not include the commit message's header, only the description.
- Description should not be more than 74 characters.

Additional Details:

@@ -4,7 +4,8 @@ import path from 'path';
const findModulePath = (moduleName: string) => {
  const searchPaths = [
    path.join('node_modules', moduleName),
    path.join('node_modules', '.pnpm')
    path.join('node_modules', '.pnpm'),
    path.resolve(__dirname, '../..')
  ];

  for (const basePath of searchPaths) {
@@ -59,7 +60,7 @@ export const getCommitLintPWDConfig =
 * ES Module (commitlint@v19.x.x. <= )
 * Directory import is not supported in ES Module resolution, so import the file directly
 */
      modulePath = await findModulePath('@commitlint/load/lib/load.js');
      modulePath = findModulePath('@commitlint/load/lib/load.js');
      load = (await import(modulePath)).default;
      break;
    }

@@ -21,7 +21,7 @@ export const getJSONBlock = (input: string): string => {
  if (jsonIndex > -1) {
    input = input.slice(jsonIndex + 8);
    const endJsonIndex = input.search('```');
    input = input.slice(0, endJsonIndex);
    input = input.slice(0, endJsonIndex);
  }
  return input;
};

@@ -108,7 +108,7 @@ const getDescriptionInstruction = () =>

const getOneLineCommitInstruction = () =>
  config.OCO_ONE_LINE_COMMIT
    ? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
    ? 'Craft a concise, single sentence, commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in one single message.'
    : '';

const getScopeInstruction = () =>
@@ -155,9 +155,9 @@ const INIT_MAIN_PROMPT = (
});

export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessageParam =
  {
    role: 'user',
    content: `diff --git a/src/server.ts b/src/server.ts
  {
    role: 'user',
    content: `diff --git a/src/server.ts b/src/server.ts
index ad4db42..f3b18a9 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -181,45 +181,49 @@ export const INIT_DIFF_PROMPT: OpenAI.Chat.Completions.ChatCompletionMessagePara
+app.listen(process.env.PORT || PORT, () => {
+  console.log(\`Server listening on port \${PORT}\`);
});`
};

const getContent = (translation: ConsistencyPrompt) => {
  const getCommitString = (commitWithScope: string, commitWithoutScope?: string) => {
    if (config.OCO_OMIT_SCOPE && commitWithoutScope) {
      return config.OCO_EMOJI
        ? `🐛 ${removeConventionalCommitWord(commitWithoutScope)}`
        : commitWithoutScope;
    }
    return config.OCO_EMOJI
      ? `🐛 ${removeConventionalCommitWord(commitWithScope)}`
      : commitWithScope;
  };

  const fix = getCommitString(
    translation.commitFix,
    translation.commitFixOmitScope
  );
const COMMIT_TYPES = {
  fix: '🐛',
  feat: '✨'
} as const;

  const feat = config.OCO_OMIT_SCOPE && translation.commitFeatOmitScope
    ? (config.OCO_EMOJI
        ? `✨ ${removeConventionalCommitWord(translation.commitFeatOmitScope)}`
        : translation.commitFeatOmitScope)
    : (config.OCO_EMOJI
        ? `✨ ${removeConventionalCommitWord(translation.commitFeat)}`
        : translation.commitFeat);
const generateCommitString = (
  type: keyof typeof COMMIT_TYPES,
  message: string
): string => {
  const cleanMessage = removeConventionalCommitWord(message);
  return config.OCO_EMOJI ? `${COMMIT_TYPES[type]} ${cleanMessage}` : message;
};

const getConsistencyContent = (translation: ConsistencyPrompt) => {
  const fixMessage =
    config.OCO_OMIT_SCOPE && translation.commitFixOmitScope
      ? translation.commitFixOmitScope
      : translation.commitFix;

  const featMessage =
    config.OCO_OMIT_SCOPE && translation.commitFeatOmitScope
      ? translation.commitFeatOmitScope
      : translation.commitFeat;

  const fix = generateCommitString('fix', fixMessage);
  const feat = config.OCO_ONE_LINE_COMMIT
    ? ''
    : generateCommitString('feat', featMessage);

  const description = config.OCO_DESCRIPTION
    ? translation.commitDescription
    : '';

  return `${fix}\n${feat}\n${description}`;
  return [fix, feat, description].filter(Boolean).join('\n');
};

const INIT_CONSISTENCY_PROMPT = (
  translation: ConsistencyPrompt
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
  role: 'assistant',
  content: getContent(translation)
  content: getConsistencyContent(translation)
});

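A quick sketch of what the new joining behavior changes, using made-up values; only the final join line mirrors the code above:

// Hypothetical generated parts:
const fix = '🐛 change port variable to uppercase PORT';
const feat = '✨ add support for process.env.PORT';
const description = ''; // OCO_DESCRIPTION disabled

// Old: `${fix}\n${feat}\n${description}` left a trailing empty line when a part was ''.
// New: only non-empty parts are joined.
const content = [fix, feat, description].filter(Boolean).join('\n');
// -> '🐛 change port variable to uppercase PORT\n✨ add support for process.env.PORT'
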
export const getMainCommitPrompt = async (
@@ -246,7 +250,7 @@ export const getMainCommitPrompt = async (
    INIT_DIFF_PROMPT,
    INIT_CONSISTENCY_PROMPT(
      commitLintConfig.consistency[
        translation.localLanguage
        translation.localLanguage
      ] as ConsistencyPrompt
    )
  ];

@@ -12,17 +12,43 @@ import { GroqEngine } from '../engine/groq';
import { MLXEngine } from '../engine/mlx';
import { DeepseekEngine } from '../engine/deepseek';
import { AimlApiEngine } from '../engine/aimlapi';
import { OpenRouterEngine } from '../engine/openrouter';

export function parseCustomHeaders(headers: any): Record<string, string> {
  let parsedHeaders = {};

  if (!headers) {
    return parsedHeaders;
  }

  try {
    if (typeof headers === 'object' && !Array.isArray(headers)) {
      parsedHeaders = headers;
    } else {
      parsedHeaders = JSON.parse(headers);
    }
  } catch (error) {
    console.warn(
      'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
    );
  }

  return parsedHeaders;
}

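A short sketch of the inputs parseCustomHeaders is meant to handle; the header names and values are illustrative:

// String form, as it typically arrives from an .env or ~/.opencommit entry:
parseCustomHeaders('{"Authorization": "Bearer token123"}');
// -> { Authorization: 'Bearer token123' }

// An already-parsed object is passed through unchanged:
parseCustomHeaders({ 'X-Custom-Header': 'test-value' });
// -> { 'X-Custom-Header': 'test-value' }

// Missing or malformed input falls back to an empty headers object:
parseCustomHeaders(undefined); // -> {}
parseCustomHeaders('not json'); // -> {} (logs a console warning)
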
export function getEngine(): AiEngine {
  const config = getConfig();
  const provider = config.OCO_AI_PROVIDER;

  const customHeaders = parseCustomHeaders(config.OCO_API_CUSTOM_HEADERS);

  const DEFAULT_CONFIG = {
    model: config.OCO_MODEL!,
    maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
    maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
    baseURL: config.OCO_API_URL!,
    apiKey: config.OCO_API_KEY!
    apiKey: config.OCO_API_KEY!,
    customHeaders
  };

  switch (provider) {
@@ -59,6 +85,9 @@ export function getEngine(): AiEngine {
    case OCO_AI_PROVIDER_ENUM.AIMLAPI:
      return new AimlApiEngine(DEFAULT_CONFIG);

    case OCO_AI_PROVIDER_ENUM.OPENROUTER:
      return new OpenRouterEngine(DEFAULT_CONFIG);

    default:
      return new OpenAiEngine(DEFAULT_CONFIG);
  }

@@ -1,7 +1,7 @@
import { execa } from 'execa';
import { readFileSync } from 'fs';
import ignore, { Ignore } from 'ignore';

import { join } from 'path';
import { outro, spinner } from '@clack/prompts';

export const assertGitRepo = async () => {
@@ -16,41 +16,44 @@ export const assertGitRepo = async () => {
//   (file) => `:(exclude)${file}`
// );

export const getOpenCommitIgnore = (): Ignore => {
export const getOpenCommitIgnore = async (): Promise<Ignore> => {
  const gitDir = await getGitDir();

  const ig = ignore();

  try {
    ig.add(readFileSync('.opencommitignore').toString().split('\n'));
    ig.add(
      readFileSync(join(gitDir, '.opencommitignore')).toString().split('\n')
    );
  } catch (e) {}

  return ig;
};

export const getCoreHooksPath = async (): Promise<string> => {
  const { stdout } = await execa('git', ['config', 'core.hooksPath']);
  const gitDir = await getGitDir();

  const { stdout } = await execa('git', ['config', 'core.hooksPath'], {
    cwd: gitDir
  });

  return stdout;
};

export const getStagedFiles = async (): Promise<string[]> => {
  const { stdout: gitDir } = await execa('git', [
    'rev-parse',
    '--show-toplevel'
  ]);
  const gitDir = await getGitDir();

  const { stdout: files } = await execa('git', [
    'diff',
    '--name-only',
    '--cached',
    '--relative',
    gitDir
  ]);
  const { stdout: files } = await execa(
    'git',
    ['diff', '--name-only', '--cached', '--relative'],
    { cwd: gitDir }
  );

  if (!files) return [];

  const filesList = files.split('\n');

  const ig = getOpenCommitIgnore();
  const ig = await getOpenCommitIgnore();
  const allowedFiles = filesList.filter((file) => !ig.ignores(file));

  if (!allowedFiles) return [];
@@ -59,12 +62,17 @@ export const getStagedFiles = async (): Promise<string[]> => {
};

export const getChangedFiles = async (): Promise<string[]> => {
  const { stdout: modified } = await execa('git', ['ls-files', '--modified']);
  const { stdout: others } = await execa('git', [
    'ls-files',
    '--others',
    '--exclude-standard'
  ]);
  const gitDir = await getGitDir();

  const { stdout: modified } = await execa('git', ['ls-files', '--modified'], {
    cwd: gitDir
  });

  const { stdout: others } = await execa(
    'git',
    ['ls-files', '--others', '--exclude-standard'],
    { cwd: gitDir }
  );

  const files = [...modified.split('\n'), ...others.split('\n')].filter(
    (file) => !!file
@@ -74,16 +82,20 @@ export const getChangedFiles = async (): Promise<string[]> => {
};

export const gitAdd = async ({ files }: { files: string[] }) => {
  const gitDir = await getGitDir();

  const gitAddSpinner = spinner();

  gitAddSpinner.start('Adding files to commit');

  await execa('git', ['add', ...files]);
  await execa('git', ['add', ...files], { cwd: gitDir });

  gitAddSpinner.stop('Done');
  gitAddSpinner.stop(`Staged ${files.length} files`);
};

export const getDiff = async ({ files }: { files: string[] }) => {
  const gitDir = await getGitDir();

  const lockFiles = files.filter(
    (file) =>
      file.includes('.lock') ||
@@ -108,12 +120,20 @@ export const getDiff = async ({ files }: { files: string[] }) => {
    (file) => !file.includes('.lock') && !file.includes('-lock.')
  );

  const { stdout: diff } = await execa('git', [
    'diff',
    '--staged',
    '--',
    ...filesWithoutLocks
  ]);
  const { stdout: diff } = await execa(
    'git',
    ['diff', '--staged', '--', ...filesWithoutLocks],
    { cwd: gitDir }
  );

  return diff;
};

export const getGitDir = async (): Promise<string> => {
  const { stdout: gitDir } = await execa('git', [
    'rev-parse',
    '--show-toplevel'
  ]);

  return gitDir;
};

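The recurring change in this file is to resolve the repository root once and run every git command from there; a minimal standalone sketch of the pattern (the function names mirror the code above, the snippet itself is illustrative):

import { execa } from 'execa';

const getGitDir = async (): Promise<string> => {
  const { stdout: gitDir } = await execa('git', ['rev-parse', '--show-toplevel']);
  return gitDir;
};

// Pinning cwd to the repo root keeps the CLI behavior identical no matter
// which subdirectory it was invoked from.
const listStagedFiles = async (): Promise<string[]> => {
  const gitDir = await getGitDir();
  const { stdout } = await execa(
    'git',
    ['diff', '--name-only', '--cached', '--relative'],
    { cwd: gitDir }
  );
  return stdout ? stdout.split('\n') : [];
};
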
57
src/utils/removeContentTags.ts
Normal file
57
src/utils/removeContentTags.ts
Normal file
@@ -0,0 +1,57 @@
/**
 * Removes content wrapped in specified tags from a string
 * @param content The content string to process
 * @param tag The tag name without angle brackets (e.g., 'think' for '<think></think>')
 * @returns The content with the specified tags and their contents removed, and trimmed
 */
export function removeContentTags<T extends string | null | undefined>(
  content: T,
  tag: string
): T {
  if (!content || typeof content !== 'string') {
    return content;
  }

  // Dynamic implementation for other cases
  const openTag = `<${tag}>`;
  const closeTag = `</${tag}>`;

  // Parse the content and remove tags
  let result = '';
  let skipUntil: number | null = null;
  let depth = 0;

  for (let i = 0; i < content.length; i++) {
    // Check for opening tag
    if (content.substring(i, i + openTag.length) === openTag) {
      depth++;
      if (depth === 1) {
        skipUntil = content.indexOf(closeTag, i + openTag.length);
        i = i + openTag.length - 1; // Skip the opening tag
        continue;
      }
    }
    // Check for closing tag
    else if (
      content.substring(i, i + closeTag.length) === closeTag &&
      depth > 0
    ) {
      depth--;
      if (depth === 0) {
        i = i + closeTag.length - 1; // Skip the closing tag
        skipUntil = null;
        continue;
      }
    }

    // Only add character if not inside a tag
    if (skipUntil === null) {
      result += content[i];
    }
  }

  // Normalize multiple spaces/tabs into a single space (preserves newlines), then trim.
  result = result.replace(/[ \t]+/g, ' ').trim();

  return result as unknown as T;
}
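
This utility is what the engines call on raw model output before returning a commit message; a quick illustration with a made-up reply from a reasoning model:

// Hypothetical model output containing a chain-of-thought block:
const raw = '<think>the diff renames port to PORT</think> fix(server.ts): use uppercase PORT constant';
removeContentTags(raw, 'think');
// -> 'fix(server.ts): use uppercase PORT constant'
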
@@ -125,7 +125,7 @@ describe('cli flow to push git branch', () => {
    await render('git', ['add index.ts'], { cwd: gitDir });

    const { queryByText, findByText, userEvent } = await render(
      `OCO_AI_PROVIDER='test' node`,
      `OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
      [resolve('./out/cli.cjs')],
      { cwd: gitDir }
    );
@@ -158,7 +158,7 @@ describe('cli flow to push git branch', () => {
    await render('git', ['add index.ts'], { cwd: gitDir });

    const { findByText, userEvent } = await render(
      `OCO_AI_PROVIDER='test' node`,
      `OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
      [resolve('./out/cli.cjs')],
      { cwd: gitDir }
    );
@@ -186,7 +186,7 @@ describe('cli flow to push git branch', () => {
    await render('git', ['add index.ts'], { cwd: gitDir });

    const { findByText, userEvent } = await render(
      `OCO_AI_PROVIDER='test' node`,
      `OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`,
      [resolve('./out/cli.cjs')],
      { cwd: gitDir }
    );

@@ -9,7 +9,7 @@ it('cli flow to generate commit message for 1 new file (staged)', async () => {
  await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
  await render('git' ,['add index.ts'], { cwd: gitDir });

  const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
  const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
  expect(await queryByText('No files are staged')).not.toBeInTheConsole();
  expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();

@@ -34,7 +34,7 @@ it('cli flow to generate commit message for 1 changed file (not staged)', async

  await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });

  const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
  const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' OCO_GITPUSH='true' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });

  expect(await findByText('No files are staged')).toBeInTheConsole();
  expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();

@@ -5,8 +5,8 @@ import { prepareEnvironment, wait } from '../utils';
import path from 'path';

function getAbsolutePath(relativePath: string) {
  const scriptDir = path.dirname(__filename);
  return path.resolve(scriptDir, relativePath);
  // Use process.cwd() which should be the project root during test execution
  return path.resolve(process.cwd(), 'test/e2e/prompt-module', relativePath);
}
async function setupCommitlint(dir: string, ver: 9 | 18 | 19) {
  let packagePath, packageJsonPath, configPath;
@@ -47,7 +47,7 @@ describe('cli flow to run "oco commitlint force"', () => {
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
@@ -83,7 +83,7 @@ describe('cli flow to run "oco commitlint force"', () => {
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
@@ -119,7 +119,7 @@ describe('cli flow to run "oco commitlint force"', () => {
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
@@ -160,7 +160,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} commitlint force \
      `,
      [],
@@ -175,7 +175,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
      `
      OCO_TEST_MOCK_TYPE='prompt-module-commitlint-config' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} commitlint get \
      `,
      [],
@@ -193,7 +193,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
      `
      OCO_TEST_MOCK_TYPE='commit-message' \
      OCO_PROMPT_MODULE='@commitlint' \
      OCO_AI_PROVIDER='test' \
      OCO_AI_PROVIDER='test' OCO_GITPUSH='true' \
      node ${resolve('./out/cli.cjs')} \
      `,
      [],

@@ -1,10 +1,11 @@
import 'cli-testing-library/extend-expect'
import { configure } from 'cli-testing-library'
import { jest } from '@jest/globals';
import 'cli-testing-library/extend-expect';
import { configure } from 'cli-testing-library';

// Make Jest available globally
global.jest = jest;

/**
 * Adjusted the wait time for waitFor/findByText to 2000ms, because the default 1000ms makes the test results flaky
 */
configure({ asyncUtilTimeout: 2000 })
configure({ asyncUtilTimeout: 2000 });

@@ -122,6 +122,30 @@ describe('config', () => {
    expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
    expect(config.OCO_OMIT_SCOPE).toEqual(true);
  });

  it('should handle custom HTTP headers correctly', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_API_CUSTOM_HEADERS: '{"X-Global-Header": "global-value"}'
    });

    envConfigFile = await generateConfig('.env', {
      OCO_API_CUSTOM_HEADERS: '{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
    });

    const config = getConfig({
      globalPath: globalConfigFile.filePath,
      envPath: envConfigFile.filePath
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_API_CUSTOM_HEADERS).toEqual({"Authorization": "Bearer token123", "X-Custom-Header": "test-value"});

    // No need to parse JSON again since it's already an object
    const parsedHeaders = config.OCO_API_CUSTOM_HEADERS;
    expect(parsedHeaders).toHaveProperty('Authorization', 'Bearer token123');
    expect(parsedHeaders).toHaveProperty('X-Custom-Header', 'test-value');
    expect(parsedHeaders).not.toHaveProperty('X-Global-Header');
  });

  it('should handle empty local config correctly', async () => {
    globalConfigFile = await generateConfig('.opencommit', {

57
test/unit/removeContentTags.test.ts
Normal file
57
test/unit/removeContentTags.test.ts
Normal file
@@ -0,0 +1,57 @@
import { removeContentTags } from '../../src/utils/removeContentTags';

describe('removeContentTags', () => {
  it('should remove content wrapped in specified tags', () => {
    const content = 'This is <think>something to hide</think> visible content';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('This is visible content');
  });

  it('should handle multiple tag occurrences', () => {
    const content = '<think>hidden</think> visible <think>also hidden</think> text';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('visible text');
  });

  it('should handle multiline content within tags', () => {
    const content = 'Start <think>hidden\nover multiple\nlines</think> End';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('Start End');
  });

  it('should return content as is when tag is not found', () => {
    const content = 'Content without any tags';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('Content without any tags');
  });

  it('should work with different tag names', () => {
    const content = 'This is <custom>something to hide</custom> visible content';
    const result = removeContentTags(content, 'custom');
    expect(result).toBe('This is visible content');
  });

  it('should handle null content', () => {
    const content = null;
    const result = removeContentTags(content, 'think');
    expect(result).toBe(null);
  });

  it('should handle undefined content', () => {
    const content = undefined;
    const result = removeContentTags(content, 'think');
    expect(result).toBe(undefined);
  });

  it('should trim the result', () => {
    const content = ' <think>hidden</think> visible ';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('visible');
  });

  it('should handle nested tags correctly', () => {
    const content = 'Outside <think>Inside <think>Nested</think></think> End';
    const result = removeContentTags(content, 'think');
    expect(result).toBe('Outside End');
  });
});
@@ -3,10 +3,10 @@
    "target": "ES2020",
    "lib": ["ES6", "ES2020"],

    "module": "CommonJS",
    "module": "NodeNext",

    "resolveJsonModule": true,
    "moduleResolution": "Node",
    "moduleResolution": "NodeNext",

    "allowJs": true,