mirror of
https://github.com/di-sukharev/opencommit.git
synced 2026-01-13 07:38:01 -05:00
Compare commits
13 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9cf2db84a9 | ||
|
|
ec307d561f | ||
|
|
0ac7211ff7 | ||
|
|
670a758bee | ||
|
|
bdc98c6fa8 | ||
|
|
f0251d14bb | ||
|
|
61f1a27377 | ||
|
|
c39181e5bd | ||
|
|
45dc2c4535 | ||
|
|
a192441f68 | ||
|
|
744bb9b11d | ||
|
|
f3adc86693 | ||
|
|
714fac0637 |
1
.dockerignore
Normal file
1
.dockerignore
Normal file
@@ -0,0 +1 @@
|
||||
.env
|
||||
46
.github/workflows/test.yml
vendored
Normal file
46
.github/workflows/test.yml
vendored
Normal file
@@ -0,0 +1,46 @@
|
||||
name: Testing
|
||||
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
unit-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Run Unit Tests
|
||||
run: npm run test:unit
|
||||
e2e-test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: Install git
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y git
|
||||
git --version
|
||||
- name: Setup git
|
||||
run: |
|
||||
git config --global user.email "test@example.com"
|
||||
git config --global user.name "Test User"
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
- name: Build
|
||||
run: npm run build
|
||||
- name: Run E2E Tests
|
||||
run: npm run test:e2e
|
||||
27
README.md
27
README.md
@@ -4,7 +4,7 @@
|
||||
<h1 align="center">OpenCommit</h1>
|
||||
<h4 align="center">Follow the bird <a href="https://twitter.com/_sukharev_"><img src="https://img.shields.io/twitter/follow/_sukharev_?style=flat&label=_sukharev_&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
|
||||
</div>
|
||||
<h2>Auto-generate meaningful commits in 1 second</h2>
|
||||
<h2>Auto-generate meaningful commits in a second</h2>
|
||||
<p>Killing lame commits with AI 🤯🔫</p>
|
||||
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
|
||||
<h4 align="center">🪩 Winner of <a href="https://twitter.com/_sukharev_/status/1683448136973582336">GitHub 2023 hackathon</a> 🪩</h4>
|
||||
@@ -68,20 +68,22 @@ You can also run it with local model through ollama:
|
||||
|
||||
```sh
|
||||
git add <files...>
|
||||
AI_PROVIDER='ollama' opencommit
|
||||
OCO_AI_PROVIDER='ollama' opencommit
|
||||
```
|
||||
|
||||
### Flags
|
||||
|
||||
There are multiple optional flags that can be used with the `oco` command:
|
||||
|
||||
#### Use Full GitMoji Specification
|
||||
|
||||
This flag can only be used if the `OCO_EMOJI` configuration item is set to `true`. This flag allows users to use all emojis in the GitMoji specification, By default, the GitMoji full specification is set to `false`, which only includes 10 emojis (🐛✨📝🚀✅♻️⬆️🔧🌐💡).
|
||||
This is due to limit the number of tokens sent in each request. However, if you would like to use the full GitMoji specification, you can use the `--fgm` flag.
|
||||
|
||||
```
|
||||
oco --fgm
|
||||
```
|
||||
|
||||
|
||||
## Configuration
|
||||
|
||||
### Local per repo configuration
|
||||
@@ -95,10 +97,11 @@ OCO_TOKENS_MAX_OUTPUT=<max response tokens (default: 500)>
|
||||
OCO_OPENAI_BASE_PATH=<may be used to set proxy path to OpenAI api>
|
||||
OCO_DESCRIPTION=<postface a message with ~3 sentences description of the changes>
|
||||
OCO_EMOJI=<boolean, add GitMoji>
|
||||
OCO_MODEL=<either 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'>
|
||||
OCO_MODEL=<either 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview' or 'gpt-4-0125-preview'>
|
||||
OCO_LANGUAGE=<locale, scroll to the bottom to see options>
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER=<message template placeholder, default: '$msg'>
|
||||
OCO_PROMPT_MODULE=<either conventional-commit or @commitlint, default: conventional-commit>
|
||||
OCO_ONE_LINE_COMMIT=<one line commit message, default: false>
|
||||
```
|
||||
|
||||
### Global config for all repos
|
||||
@@ -125,7 +128,7 @@ oco config set OCO_EMOJI=false
|
||||
|
||||
### Switch to GPT-4 or other models
|
||||
|
||||
By default, OpenCommit uses `gpt-3.5-turbo-16k` model.
|
||||
By default, OpenCommit uses `gpt-3.5-turbo` model.
|
||||
|
||||
You may switch to GPT-4 which performs better, but costs ~x15 times more 🤠
|
||||
|
||||
@@ -142,7 +145,7 @@ oco config set OCO_MODEL=gpt-3.5-turbo
|
||||
or for GPT-4 Turbo (Preview) which is more capable, has knowledge of world events up to April 2023, a 128k context window and 2-3x cheaper vs GPT-4:
|
||||
|
||||
```sh
|
||||
oco config set OCO_MODEL=gpt-4-1106-preview
|
||||
oco config set OCO_MODEL=gpt-4-0125-preview
|
||||
```
|
||||
|
||||
Make sure that you spell it `gpt-4` (lowercase) and that you have API access to the 4th model. Even if you have ChatGPT+, that doesn't necessarily mean that you have API access to GPT-4.
|
||||
@@ -166,6 +169,14 @@ oco config set OCO_LANGUAGE=française
|
||||
The default language setting is **English**
|
||||
All available languages are currently listed in the [i18n](https://github.com/di-sukharev/opencommit/tree/master/src/i18n) folder
|
||||
|
||||
### Push to git
|
||||
|
||||
Pushing to git is on by default but if you would like to turn it off just use:
|
||||
|
||||
```sh
|
||||
oc config set OCO_GITPUSH=false
|
||||
```
|
||||
|
||||
### Switch to `@commitlint`
|
||||
|
||||
OpenCommit allows you to choose the prompt module used to generate commit messages. By default, OpenCommit uses its conventional-commit message generator. However, you can switch to using the `@commitlint` prompt module if you prefer. This option lets you generate commit messages in respect with the local config.
|
||||
@@ -348,7 +359,7 @@ jobs:
|
||||
OCO_OPENAI_BASE_PATH: ''
|
||||
OCO_DESCRIPTION: false
|
||||
OCO_EMOJI: false
|
||||
OCO_MODEL: gpt-3.5-turbo-16k
|
||||
OCO_MODEL: gpt-3.5-turbo
|
||||
OCO_LANGUAGE: en
|
||||
OCO_PROMPT_MODULE: conventional-commit
|
||||
```
|
||||
@@ -365,6 +376,6 @@ You pay for your requests to OpenAI API on your own.
|
||||
|
||||
OpenCommit stores your key locally.
|
||||
|
||||
OpenCommit by default uses 3.5-turbo-16k model, it should not exceed $0.10 per casual working day.
|
||||
OpenCommit by default uses 3.5-turbo model, it should not exceed $0.10 per casual working day.
|
||||
|
||||
You may switch to gpt-4, it's better, but more expensive.
|
||||
|
||||
28
jest.config.ts
Normal file
28
jest.config.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* For a detailed explanation regarding each configuration property, visit:
|
||||
* https://jestjs.io/docs/configuration
|
||||
*/
|
||||
|
||||
import type { Config } from 'jest';
|
||||
|
||||
const config: Config = {
|
||||
testTimeout: 100_000,
|
||||
coverageProvider: 'v8',
|
||||
moduleDirectories: ['node_modules', 'src'],
|
||||
preset: 'ts-jest/presets/js-with-ts-esm',
|
||||
setupFilesAfterEnv: ['<rootDir>/test/jest-setup.ts'],
|
||||
testEnvironment: 'node',
|
||||
testRegex: ['.*\\.test\\.ts$'],
|
||||
transformIgnorePatterns: ['node_modules/(?!cli-testing-library)'],
|
||||
transform: {
|
||||
'^.+\\.(ts|tsx)$': [
|
||||
'ts-jest',
|
||||
{
|
||||
diagnostics: false,
|
||||
useESM: true
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
export default config;
|
||||
17421
out/cli.cjs
17421
out/cli.cjs
File diff suppressed because one or more lines are too long
30750
out/github-action.cjs
30750
out/github-action.cjs
File diff suppressed because one or more lines are too long
Binary file not shown.
6317
package-lock.json
generated
6317
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
15
package.json
15
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "opencommit",
|
||||
"version": "3.0.11",
|
||||
"version": "3.0.15",
|
||||
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
|
||||
"keywords": [
|
||||
"git",
|
||||
@@ -47,19 +47,29 @@
|
||||
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
|
||||
"deploy": "npm version patch && npm run build:push && git push --tags && npm publish --tag latest",
|
||||
"lint": "eslint src --ext ts && tsc --noEmit",
|
||||
"format": "prettier --write src"
|
||||
"format": "prettier --write src",
|
||||
"test:all": "npm run test:unit:docker && npm run test:e2e:docker",
|
||||
"test:docker-build": "docker build -t oco-test -f test/Dockerfile .",
|
||||
"test:unit": "NODE_OPTIONS=--experimental-vm-modules jest test/unit",
|
||||
"test:unit:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:unit",
|
||||
"test:e2e": "jest test/e2e",
|
||||
"test:e2e:docker": "npm run test:docker-build && DOCKER_CONTENT_TRUST=0 docker run --rm oco-test npm run test:e2e"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/types": "^17.4.4",
|
||||
"@types/ini": "^1.3.31",
|
||||
"@types/inquirer": "^9.0.3",
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/node": "^16.18.14",
|
||||
"@typescript-eslint/eslint-plugin": "^5.45.0",
|
||||
"@typescript-eslint/parser": "^5.45.0",
|
||||
"cli-testing-library": "^2.0.2",
|
||||
"dotenv": "^16.0.3",
|
||||
"esbuild": "^0.15.18",
|
||||
"eslint": "^8.28.0",
|
||||
"jest": "^29.7.0",
|
||||
"prettier": "^2.8.4",
|
||||
"ts-jest": "^29.1.2",
|
||||
"ts-node": "^10.9.1",
|
||||
"typescript": "^4.9.3"
|
||||
},
|
||||
@@ -67,6 +77,7 @@
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/github": "^5.1.1",
|
||||
"@anthropic-ai/sdk": "^0.19.2",
|
||||
"@clack/prompts": "^0.6.1",
|
||||
"@dqbd/tiktoken": "^1.0.2",
|
||||
"@octokit/webhooks-schemas": "^6.11.0",
|
||||
|
||||
@@ -29,7 +29,7 @@ cli(
|
||||
if (await isHookCalled()) {
|
||||
prepareCommitMessageHook();
|
||||
} else {
|
||||
commit(extraArgs, flags.fgm);
|
||||
commit(extraArgs, false, flags.fgm);
|
||||
}
|
||||
},
|
||||
extraArgs
|
||||
|
||||
@@ -94,13 +94,17 @@ ${chalk.grey('——————————————————')}`
|
||||
|
||||
const remotes = await getGitRemotes();
|
||||
|
||||
// user isn't pushing, return early
|
||||
if (config?.OCO_GITPUSH === false)
|
||||
return
|
||||
|
||||
if (!remotes.length) {
|
||||
const { stdout } = await execa('git', ['push']);
|
||||
if (stdout) outro(stdout);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
if (remotes.length === 1) {
|
||||
if (remotes.length === 1 && config?.OCO_GITPUSH !== true) {
|
||||
const isPushConfirmedByUser = await confirm({
|
||||
message: 'Do you want to run `git push`?'
|
||||
});
|
||||
|
||||
@@ -4,17 +4,16 @@ import * as dotenv from 'dotenv';
|
||||
import { existsSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { parse as iniParse, stringify as iniStringify } from 'ini';
|
||||
import { homedir } from 'os';
|
||||
import { join as pathJoin } from 'path';
|
||||
import { join as pathJoin, resolve as pathResolve } from 'path';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import { COMMANDS } from '../CommandsEnum';
|
||||
import { getI18nLocal } from '../i18n';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
export enum CONFIG_KEYS {
|
||||
OCO_OPENAI_API_KEY = 'OCO_OPENAI_API_KEY',
|
||||
OCO_ANTHROPIC_API_KEY = 'OCO_ANTHROPIC_API_KEY',
|
||||
OCO_TOKENS_MAX_INPUT = 'OCO_TOKENS_MAX_INPUT',
|
||||
OCO_TOKENS_MAX_OUTPUT = 'OCO_TOKENS_MAX_OUTPUT',
|
||||
OCO_OPENAI_BASE_PATH = 'OCO_OPENAI_BASE_PATH',
|
||||
@@ -25,6 +24,8 @@ export enum CONFIG_KEYS {
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
|
||||
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
|
||||
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
|
||||
OCO_GITPUSH = 'OCO_GITPUSH',
|
||||
OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT'
|
||||
}
|
||||
|
||||
export enum CONFIG_MODES {
|
||||
@@ -32,6 +33,31 @@ export enum CONFIG_MODES {
|
||||
set = 'set'
|
||||
}
|
||||
|
||||
export const MODEL_LIST = {
|
||||
openai: ['gpt-3.5-turbo',
|
||||
'gpt-3.5-turbo-0125',
|
||||
'gpt-4',
|
||||
'gpt-4-turbo',
|
||||
'gpt-4-1106-preview',
|
||||
'gpt-4-turbo-preview',
|
||||
'gpt-4-0125-preview'],
|
||||
|
||||
anthropic: ['claude-3-haiku-20240307',
|
||||
'claude-3-sonnet-20240229',
|
||||
'claude-3-opus-20240229']
|
||||
}
|
||||
|
||||
const getDefaultModel = (provider: string | undefined): string => {
|
||||
switch (provider) {
|
||||
case 'ollama':
|
||||
return '';
|
||||
case 'anthropic':
|
||||
return MODEL_LIST.anthropic[0];
|
||||
default:
|
||||
return MODEL_LIST.openai[0];
|
||||
}
|
||||
};
|
||||
|
||||
export enum DEFAULT_TOKEN_LIMITS {
|
||||
DEFAULT_MAX_TOKENS_INPUT = 4096,
|
||||
DEFAULT_MAX_TOKENS_OUTPUT = 500
|
||||
@@ -54,16 +80,25 @@ const validateConfig = (
|
||||
export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
|
||||
//need api key unless running locally with ollama
|
||||
validateConfig('API_KEY', value || config.OCO_AI_PROVIDER == 'ollama', 'You need to provide an API key');
|
||||
validateConfig(
|
||||
'OpenAI API_KEY',
|
||||
value || config.OCO_ANTHROPIC_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
|
||||
'You need to provide an OpenAI/Anthropic API key'
|
||||
);
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_API_KEY,
|
||||
value.startsWith('sk-'),
|
||||
'Must start with "sk-"'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ANTHROPIC_API_KEY](value: any, config: any = {}) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_OPENAI_API_KEY,
|
||||
config[CONFIG_KEYS.OCO_OPENAI_BASE_PATH] || value.length === 51,
|
||||
'Must be 51 characters long'
|
||||
'ANTHROPIC_API_KEY',
|
||||
value || config.OCO_OPENAI_API_KEY || config.OCO_AI_PROVIDER == 'ollama' || config.OCO_AI_PROVIDER == 'test',
|
||||
'You need to provide an OpenAI/Anthropic API key'
|
||||
);
|
||||
|
||||
return value;
|
||||
@@ -148,17 +183,12 @@ export const configValidators = {
|
||||
[CONFIG_KEYS.OCO_MODEL](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_MODEL,
|
||||
[
|
||||
'gpt-3.5-turbo',
|
||||
'gpt-4',
|
||||
'gpt-3.5-turbo-16k',
|
||||
'gpt-3.5-turbo-0613',
|
||||
'gpt-4-1106-preview'
|
||||
].includes(value),
|
||||
`${value} is not supported yet, use 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo' or 'gpt-4-1106-preview'`
|
||||
[...MODEL_LIST.openai, ...MODEL_LIST.anthropic].includes(value),
|
||||
`${value} is not supported yet, use 'gpt-4', 'gpt-4-turbo', 'gpt-3.5-turbo' (default), 'gpt-3.5-turbo-0125', 'gpt-4-1106-preview', 'gpt-4-turbo-preview', 'gpt-4-0125-preview', 'claude-3-opus-20240229', 'claude-3-sonnet-20240229' or 'claude-3-haiku-20240307'`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
|
||||
@@ -174,7 +204,15 @@ export const configValidators = {
|
||||
['conventional-commit', '@commitlint'].includes(value),
|
||||
`${value} is not supported yet, use '@commitlint' or 'conventional-commit' (default)`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_GITPUSH](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_GITPUSH,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
@@ -184,23 +222,44 @@ export const configValidators = {
|
||||
[
|
||||
'',
|
||||
'openai',
|
||||
'ollama'
|
||||
'anthropic',
|
||||
'ollama',
|
||||
'test'
|
||||
].includes(value),
|
||||
`${value} is not supported yet, use 'ollama' or 'openai' (default)`
|
||||
`${value} is not supported yet, use 'ollama' 'anthropic' or 'openai' (default)`
|
||||
);
|
||||
return value;
|
||||
},
|
||||
|
||||
[CONFIG_KEYS.OCO_ONE_LINE_COMMIT](value: any) {
|
||||
validateConfig(
|
||||
CONFIG_KEYS.OCO_ONE_LINE_COMMIT,
|
||||
typeof value === 'boolean',
|
||||
'Must be true or false'
|
||||
);
|
||||
|
||||
return value;
|
||||
},
|
||||
};
|
||||
|
||||
export type ConfigType = {
|
||||
[key in CONFIG_KEYS]?: any;
|
||||
};
|
||||
|
||||
const configPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultConfigPath = pathJoin(homedir(), '.opencommit');
|
||||
const defaultEnvPath = pathResolve(process.cwd(), '.env');
|
||||
|
||||
export const getConfig = (): ConfigType | null => {
|
||||
export const getConfig = ({
|
||||
configPath = defaultConfigPath,
|
||||
envPath = defaultEnvPath
|
||||
}: {
|
||||
configPath?: string
|
||||
envPath?: string
|
||||
} = {}): ConfigType | null => {
|
||||
dotenv.config({ path: envPath });
|
||||
const configFromEnv = {
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
OCO_TOKENS_MAX_INPUT: process.env.OCO_TOKENS_MAX_INPUT
|
||||
? Number(process.env.OCO_TOKENS_MAX_INPUT)
|
||||
: undefined,
|
||||
@@ -210,12 +269,14 @@ export const getConfig = (): ConfigType | null => {
|
||||
OCO_OPENAI_BASE_PATH: process.env.OCO_OPENAI_BASE_PATH,
|
||||
OCO_DESCRIPTION: process.env.OCO_DESCRIPTION === 'true' ? true : false,
|
||||
OCO_EMOJI: process.env.OCO_EMOJI === 'true' ? true : false,
|
||||
OCO_MODEL: process.env.OCO_MODEL || 'gpt-3.5-turbo-16k',
|
||||
OCO_MODEL: process.env.OCO_MODEL || getDefaultModel(process.env.OCO_AI_PROVIDER),
|
||||
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
|
||||
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
|
||||
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit',
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai'
|
||||
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai',
|
||||
OCO_GITPUSH: process.env.OCO_GITPUSH === 'false' ? false : true,
|
||||
OCO_ONE_LINE_COMMIT: process.env.OCO_ONE_LINE_COMMIT === 'true' ? true : false
|
||||
};
|
||||
|
||||
const configExists = existsSync(configPath);
|
||||
@@ -226,7 +287,6 @@ export const getConfig = (): ConfigType | null => {
|
||||
|
||||
for (const configKey of Object.keys(config)) {
|
||||
if (
|
||||
!config[configKey] ||
|
||||
['null', 'undefined'].includes(config[configKey])
|
||||
) {
|
||||
config[configKey] = undefined;
|
||||
@@ -252,7 +312,7 @@ export const getConfig = (): ConfigType | null => {
|
||||
return config;
|
||||
};
|
||||
|
||||
export const setConfig = (keyValues: [key: string, value: string][]) => {
|
||||
export const setConfig = (keyValues: [key: string, value: string][], configPath: string = defaultConfigPath) => {
|
||||
const config = getConfig() || {};
|
||||
|
||||
for (const [configKey, configValue] of keyValues) {
|
||||
|
||||
124
src/engine/anthropic.ts
Normal file
124
src/engine/anthropic.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
|
||||
import Anthropic from '@anthropic-ai/sdk';
|
||||
import {ChatCompletionRequestMessage} from 'openai'
|
||||
import { MessageCreateParamsNonStreaming, MessageParam } from '@anthropic-ai/sdk/resources';
|
||||
|
||||
import { intro, outro } from '@clack/prompts';
|
||||
|
||||
import {
|
||||
CONFIG_MODES,
|
||||
DEFAULT_TOKEN_LIMITS,
|
||||
getConfig
|
||||
} from '../commands/config';
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine } from './Engine';
|
||||
import { MODEL_LIST } from '../commands/config';
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
const MAX_TOKENS_OUTPUT =
|
||||
config?.OCO_TOKENS_MAX_OUTPUT ||
|
||||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
const MAX_TOKENS_INPUT =
|
||||
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
|
||||
let provider = config?.OCO_AI_PROVIDER;
|
||||
let apiKey = config?.OCO_ANTHROPIC_API_KEY;
|
||||
const [command, mode] = process.argv.slice(2);
|
||||
if (
|
||||
provider === 'anthropic' &&
|
||||
!apiKey &&
|
||||
command !== 'config' &&
|
||||
mode !== CONFIG_MODES.set
|
||||
) {
|
||||
intro('opencommit');
|
||||
|
||||
outro(
|
||||
'OCO_ANTHROPIC_API_KEY is not set, please run `oco config set OCO_ANTHROPIC_API_KEY=<your token> . If you are using Claude, make sure you add payment details, so API works.`'
|
||||
);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const MODEL = config?.OCO_MODEL;
|
||||
if (provider === 'anthropic' &&
|
||||
!MODEL_LIST.anthropic.includes(MODEL) &&
|
||||
command !== 'config' &&
|
||||
mode !== CONFIG_MODES.set) {
|
||||
outro(
|
||||
`${chalk.red('✖')} Unsupported model ${MODEL} for Anthropic. Supported models are: ${MODEL_LIST.anthropic.join(
|
||||
', '
|
||||
)}`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
class AnthropicAi implements AiEngine {
|
||||
private anthropicAiApiConfiguration = {
|
||||
apiKey: apiKey
|
||||
};
|
||||
private anthropicAI!: Anthropic;
|
||||
|
||||
constructor() {
|
||||
this.anthropicAI = new Anthropic(this.anthropicAiApiConfiguration);
|
||||
}
|
||||
|
||||
public generateCommitMessage = async (
|
||||
messages: Array<ChatCompletionRequestMessage>
|
||||
): Promise<string | undefined> => {
|
||||
|
||||
const systemMessage = messages.find(msg => msg.role === 'system')?.content as string;
|
||||
const restMessages = messages.filter((msg) => msg.role !== 'system') as MessageParam[];
|
||||
|
||||
const params: MessageCreateParamsNonStreaming = {
|
||||
model: MODEL,
|
||||
system: systemMessage,
|
||||
messages: restMessages,
|
||||
temperature: 0,
|
||||
top_p: 0.1,
|
||||
max_tokens: MAX_TOKENS_OUTPUT
|
||||
};
|
||||
try {
|
||||
const REQUEST_TOKENS = messages
|
||||
.map((msg) => tokenCount(msg.content as string) + 4)
|
||||
.reduce((a, b) => a + b, 0);
|
||||
|
||||
if (REQUEST_TOKENS > MAX_TOKENS_INPUT - MAX_TOKENS_OUTPUT) {
|
||||
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
|
||||
}
|
||||
|
||||
const data = await this.anthropicAI.messages.create(params);
|
||||
|
||||
const message = data?.content[0].text;
|
||||
|
||||
return message;
|
||||
} catch (error) {
|
||||
outro(`${chalk.red('✖')} ${JSON.stringify(params)}`);
|
||||
|
||||
const err = error as Error;
|
||||
outro(`${chalk.red('✖')} ${err?.message || err}`);
|
||||
|
||||
if (
|
||||
axios.isAxiosError<{ error?: { message: string } }>(error) &&
|
||||
error.response?.status === 401
|
||||
) {
|
||||
const anthropicAiError = error.response.data.error;
|
||||
|
||||
if (anthropicAiError?.message) outro(anthropicAiError.message);
|
||||
outro(
|
||||
'For help look into README https://github.com/di-sukharev/opencommit#setup'
|
||||
);
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export const anthropicAi = new AnthropicAi();
|
||||
@@ -8,14 +8,14 @@ export class OllamaAi implements AiEngine {
|
||||
): Promise<string | undefined> {
|
||||
const model = 'mistral'; // todo: allow other models
|
||||
|
||||
let prompt = messages.map((x) => x.content).join('\n');
|
||||
//hoftix: local models are not so clever so im changing the prompt a bit...
|
||||
prompt += 'Summarize above git diff in 10 words or less';
|
||||
//console.log(messages);
|
||||
//process.exit()
|
||||
|
||||
const url = 'http://localhost:11434/api/generate';
|
||||
const url = 'http://localhost:11434/api/chat';
|
||||
const p = {
|
||||
model,
|
||||
prompt,
|
||||
messages,
|
||||
options: {temperature: 0, top_p: 0.1},
|
||||
stream: false
|
||||
};
|
||||
try {
|
||||
@@ -24,8 +24,10 @@ export class OllamaAi implements AiEngine {
|
||||
'Content-Type': 'application/json'
|
||||
}
|
||||
});
|
||||
const answer = response.data?.response;
|
||||
return answer;
|
||||
|
||||
const message = response.data.message;
|
||||
|
||||
return message?.content;
|
||||
} catch (err: any) {
|
||||
const message = err.response?.data?.error ?? err.message;
|
||||
throw new Error('local model issues. details: ' + message);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import axios from 'axios';
|
||||
import chalk from 'chalk';
|
||||
import { execa } from 'execa';
|
||||
|
||||
import {
|
||||
ChatCompletionRequestMessage,
|
||||
Configuration as OpenAiApiConfiguration,
|
||||
@@ -17,20 +18,28 @@ import {
|
||||
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
|
||||
import { tokenCount } from '../utils/tokenCount';
|
||||
import { AiEngine } from './Engine';
|
||||
import { MODEL_LIST } from '../commands/config';
|
||||
|
||||
const config = getConfig();
|
||||
|
||||
const MAX_TOKENS_OUTPUT = config?.OCO_TOKENS_MAX_OUTPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
const MAX_TOKENS_INPUT = config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
const MAX_TOKENS_OUTPUT =
|
||||
config?.OCO_TOKENS_MAX_OUTPUT ||
|
||||
DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT;
|
||||
const MAX_TOKENS_INPUT =
|
||||
config?.OCO_TOKENS_MAX_INPUT || DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT;
|
||||
let basePath = config?.OCO_OPENAI_BASE_PATH;
|
||||
let apiKey = config?.OCO_OPENAI_API_KEY
|
||||
let apiKey = config?.OCO_OPENAI_API_KEY;
|
||||
|
||||
const [command, mode] = process.argv.slice(2);
|
||||
|
||||
const isLocalModel = config?.OCO_AI_PROVIDER == 'ollama'
|
||||
const provider = config?.OCO_AI_PROVIDER;
|
||||
|
||||
|
||||
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalModel) {
|
||||
if (
|
||||
provider === 'openai' &&
|
||||
!apiKey &&
|
||||
command !== 'config' &&
|
||||
mode !== CONFIG_MODES.set
|
||||
) {
|
||||
intro('opencommit');
|
||||
|
||||
outro(
|
||||
@@ -44,6 +53,18 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalMode
|
||||
}
|
||||
|
||||
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
|
||||
if (provider === 'openai' &&
|
||||
!MODEL_LIST.openai.includes(MODEL) &&
|
||||
command !== 'config' &&
|
||||
mode !== CONFIG_MODES.set) {
|
||||
outro(
|
||||
`${chalk.red('✖')} Unsupported model ${MODEL} for OpenAI. Supported models are: ${MODEL_LIST.openai.join(
|
||||
', '
|
||||
)}`
|
||||
);
|
||||
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
class OpenAi implements AiEngine {
|
||||
private openAiApiConfiguration = new OpenAiApiConfiguration({
|
||||
@@ -105,6 +126,4 @@ class OpenAi implements AiEngine {
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
||||
export const api = new OpenAi();
|
||||
|
||||
12
src/engine/testAi.ts
Normal file
12
src/engine/testAi.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { ChatCompletionRequestMessage } from 'openai';
|
||||
import { AiEngine } from './Engine';
|
||||
|
||||
export class TestAi implements AiEngine {
|
||||
async generateCommitMessage(
|
||||
messages: Array<ChatCompletionRequestMessage>
|
||||
): Promise<string | undefined> {
|
||||
return 'test commit message';
|
||||
}
|
||||
}
|
||||
|
||||
export const testAi = new TestAi();
|
||||
@@ -267,6 +267,7 @@ const INIT_MAIN_PROMPT = (
|
||||
${config?.OCO_EMOJI ? 'Use GitMoji convention to preface the commit.' : 'Do not preface the commit with anything.'}
|
||||
${config?.OCO_DESCRIPTION ? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.' : "Don't add any descriptions to the commit, only commit message."}
|
||||
Use the present tense. Use ${language} to answer.
|
||||
${ config?.OCO_ONE_LINE_COMMIT ? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.' : ""}
|
||||
|
||||
You will strictly follow the following conventions to generate the content of the commit message:
|
||||
- ${prompts.join('\n- ')}
|
||||
|
||||
@@ -115,6 +115,11 @@ const INIT_MAIN_PROMPT = (
|
||||
? 'Add a short description of WHY the changes are done after the commit message. Don\'t start it with "This commit", just describe the changes.'
|
||||
: "Don't add any descriptions to the commit, only commit message."
|
||||
}
|
||||
${
|
||||
config?.OCO_ONE_LINE_COMMIT
|
||||
? 'Craft a concise commit message that encapsulates all changes made, with an emphasis on the primary updates. If the modifications share a common theme or scope, mention it succinctly; otherwise, leave the scope out to maintain focus. The goal is to provide a clear and unified overview of the changes in a one single message, without diverging into a list of commit per file change.'
|
||||
: ""
|
||||
}
|
||||
Use the present tense. Lines must not be longer than 74 characters. Use ${language} for the commit message.`
|
||||
});
|
||||
|
||||
|
||||
@@ -2,11 +2,17 @@ import { AiEngine } from '../engine/Engine';
|
||||
import { api } from '../engine/openAi';
|
||||
import { getConfig } from '../commands/config';
|
||||
import { ollamaAi } from '../engine/ollama';
|
||||
import { anthropicAi } from '../engine/anthropic'
|
||||
import { testAi } from '../engine/testAi';
|
||||
|
||||
export function getEngine(): AiEngine {
|
||||
const config = getConfig();
|
||||
if (config?.OCO_AI_PROVIDER == 'ollama') {
|
||||
return ollamaAi;
|
||||
} else if (config?.OCO_AI_PROVIDER == 'anthropic') {
|
||||
return anthropicAi;
|
||||
} else if (config?.OCO_AI_PROVIDER == 'test') {
|
||||
return testAi;
|
||||
}
|
||||
//open ai gpt by default
|
||||
return api;
|
||||
|
||||
@@ -75,8 +75,11 @@ export const getChangedFiles = async (): Promise<string[]> => {
|
||||
|
||||
export const gitAdd = async ({ files }: { files: string[] }) => {
|
||||
const gitAddSpinner = spinner();
|
||||
|
||||
gitAddSpinner.start('Adding files to commit');
|
||||
|
||||
await execa('git', ['add', ...files]);
|
||||
|
||||
gitAddSpinner.stop('Done');
|
||||
};
|
||||
|
||||
|
||||
19
test/Dockerfile
Normal file
19
test/Dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM ubuntu:latest
|
||||
|
||||
RUN apt-get update && apt-get install -y curl git
|
||||
|
||||
# Install Node.js v20
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
|
||||
RUN apt-get install -y nodejs
|
||||
|
||||
# Setup git
|
||||
RUN git config --global user.email "test@example.com"
|
||||
RUN git config --global user.name "Test User"
|
||||
|
||||
COPY . /app
|
||||
WORKDIR /app
|
||||
|
||||
RUN ls -la
|
||||
|
||||
RUN npm install
|
||||
RUN npm run build
|
||||
13
test/e2e/noChanges.test.ts
Normal file
13
test/e2e/noChanges.test.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { resolve } from 'path'
|
||||
import { render } from 'cli-testing-library'
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment } from './utils';
|
||||
|
||||
it('cli flow when there are no changes', async () => {
|
||||
const { gitDir, cleanup } = await prepareEnvironment();
|
||||
|
||||
const { findByText } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });
|
||||
expect(await findByText('No changes detected')).toBeInTheConsole();
|
||||
|
||||
await cleanup();
|
||||
});
|
||||
56
test/e2e/oneFile.test.ts
Normal file
56
test/e2e/oneFile.test.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import { resolve } from 'path'
|
||||
import { render } from 'cli-testing-library'
|
||||
import 'cli-testing-library/extend-expect';
|
||||
import { prepareEnvironment } from './utils';
|
||||
|
||||
// Happy path: the file is already staged, so the CLI must skip the staging
// prompts and go straight to message generation. OCO_AI_PROVIDER='test'
// selects the stubbed provider, so no network calls are made.
it('cli flow to generate commit message for 1 new file (staged)', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();

  // Create one new file and stage it before launching the CLI.
  // NOTE(review): the shell redirection and multi-word single argument here
  // rely on cli-testing-library spawning through a shell — confirm.
  await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
  await render('git' ,['add index.ts'], { cwd: gitDir });

  const { queryByText, findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });

  // The staging prompts must NOT appear, since index.ts is staged already.
  expect(await queryByText('No files are staged')).not.toBeInTheConsole();
  expect(await queryByText('Do you want to stage all files and generate commit message?')).not.toBeInTheConsole();

  // Accept the generated commit message with Enter.
  expect(await findByText('Generating the commit message')).toBeInTheConsole();
  expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
  userEvent.keyboard('[Enter]');

  // Accept the default remote; the push targets the local bare repo
  // created by prepareEnvironment.
  expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
  userEvent.keyboard('[Enter]');

  expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();

  await cleanup();
});
|
||||
|
||||
// Unstaged-changes path: a committed file is modified but not staged, so the
// CLI must offer to stage everything before generating the message.
it('cli flow to generate commit message for 1 changed file (not staged)', async () => {
  const { gitDir, cleanup } = await prepareEnvironment();

  // Commit an initial version of the file...
  // NOTE(review): multi-word single arguments and shell redirection here rely
  // on cli-testing-library spawning through a shell — confirm.
  await render('echo' ,[`'console.log("Hello World");' > index.ts`], { cwd: gitDir });
  await render('git' ,['add index.ts'], { cwd: gitDir });
  await render('git' ,[`commit -m 'add new file'`], { cwd: gitDir });

  // ...then modify it without staging, so the working tree is dirty.
  await render('echo' ,[`'console.log("Good night World");' >> index.ts`], { cwd: gitDir });

  const { findByText, userEvent } = await render(`OCO_AI_PROVIDER='test' node`, [resolve('./out/cli.cjs')], { cwd: gitDir });

  // Accept the offer to stage all files.
  expect(await findByText('No files are staged')).toBeInTheConsole();
  expect(await findByText('Do you want to stage all files and generate commit message?')).toBeInTheConsole();
  userEvent.keyboard('[Enter]');

  // Accept the generated commit message.
  expect(await findByText('Generating the commit message')).toBeInTheConsole();
  expect(await findByText('Confirm the commit message?')).toBeInTheConsole();
  userEvent.keyboard('[Enter]');

  expect(await findByText('Successfully committed')).toBeInTheConsole();

  // Accept the default remote; the push targets the local bare repo
  // created by prepareEnvironment.
  expect(await findByText('Choose a remote to push to')).toBeInTheConsole();
  userEvent.keyboard('[Enter]');

  expect(await findByText('Successfully pushed all commits to origin')).toBeInTheConsole();

  await cleanup();
});
|
||||
31
test/e2e/utils.ts
Normal file
31
test/e2e/utils.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import path from 'path'
|
||||
import { mkdtemp, rm } from 'fs'
|
||||
import { promisify } from 'util';
|
||||
import { tmpdir } from 'os';
|
||||
import { exec } from 'child_process';
|
||||
const fsMakeTempDir = promisify(mkdtemp);
|
||||
const fsExec = promisify(exec);
|
||||
const fsRemove = promisify(rm);
|
||||
|
||||
/**
|
||||
* Prepare the environment for the test
|
||||
* Create a temporary git repository in the temp directory
|
||||
*/
|
||||
export const prepareEnvironment = async (): Promise<{
|
||||
gitDir: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> => {
|
||||
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
// Create a remote git repository int the temp directory. This is necessary to execute the `git push` command
|
||||
await fsExec('git init --bare remote.git', { cwd: tempDir });
|
||||
await fsExec('git clone remote.git test', { cwd: tempDir });
|
||||
const gitDir = path.resolve(tempDir, 'test');
|
||||
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
}
|
||||
return {
|
||||
gitDir,
|
||||
cleanup,
|
||||
}
|
||||
}
|
||||
7
test/jest-setup.ts
Normal file
7
test/jest-setup.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
// Global jest setup for the e2e suite: registers the custom
// toBeInTheConsole matchers and tunes cli-testing-library timeouts.
import 'cli-testing-library/extend-expect'
import { configure } from 'cli-testing-library'

/**
 * Raise the wait time for waitFor/findByText to 2000ms, because the default
 * 1000ms makes the test results flaky.
 */
configure({ asyncUtilTimeout: 2000 })
|
||||
105
test/unit/config.test.ts
Normal file
105
test/unit/config.test.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { getConfig } from '../../src/commands/config';
|
||||
import { prepareFile } from './utils';
|
||||
|
||||
describe('getConfig', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
function resetEnv(env: NodeJS.ProcessEnv) {
|
||||
Object.keys(process.env).forEach((key) => {
|
||||
if (!(key in env)) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = env[key];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
resetEnv(originalEnv);
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
resetEnv(originalEnv);
|
||||
});
|
||||
|
||||
it('return config values from the global config file', async () => {
|
||||
const configFile = await prepareFile(
|
||||
'.opencommit',
|
||||
`
|
||||
OCO_OPENAI_API_KEY="sk-key"
|
||||
OCO_ANTHROPIC_API_KEY="secret-key"
|
||||
OCO_TOKENS_MAX_INPUT="8192"
|
||||
OCO_TOKENS_MAX_OUTPUT="1000"
|
||||
OCO_OPENAI_BASE_PATH="/openai/api"
|
||||
OCO_DESCRIPTION="true"
|
||||
OCO_EMOJI="true"
|
||||
OCO_MODEL="gpt-4"
|
||||
OCO_LANGUAGE="de"
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m"
|
||||
OCO_PROMPT_MODULE="@commitlint"
|
||||
OCO_AI_PROVIDER="ollama"
|
||||
OCO_GITPUSH="false"
|
||||
OCO_ONE_LINE_COMMIT="true"
|
||||
`
|
||||
);
|
||||
const config = getConfig({ configPath: configFile.filePath, envPath: '' });
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key');
|
||||
expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key');
|
||||
expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192);
|
||||
expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000);
|
||||
expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api');
|
||||
expect(config!['OCO_DESCRIPTION']).toEqual(true);
|
||||
expect(config!['OCO_EMOJI']).toEqual(true);
|
||||
expect(config!['OCO_MODEL']).toEqual('gpt-4');
|
||||
expect(config!['OCO_LANGUAGE']).toEqual('de');
|
||||
expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m');
|
||||
expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint');
|
||||
expect(config!['OCO_AI_PROVIDER']).toEqual('ollama');
|
||||
expect(config!['OCO_GITPUSH']).toEqual(false);
|
||||
expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true);
|
||||
|
||||
await configFile.cleanup();
|
||||
});
|
||||
|
||||
it('return config values from the local env file', async () => {
|
||||
const envFile = await prepareFile(
|
||||
'.env',
|
||||
`
|
||||
OCO_OPENAI_API_KEY="sk-key"
|
||||
OCO_ANTHROPIC_API_KEY="secret-key"
|
||||
OCO_TOKENS_MAX_INPUT="8192"
|
||||
OCO_TOKENS_MAX_OUTPUT="1000"
|
||||
OCO_OPENAI_BASE_PATH="/openai/api"
|
||||
OCO_DESCRIPTION="true"
|
||||
OCO_EMOJI="true"
|
||||
OCO_MODEL="gpt-4"
|
||||
OCO_LANGUAGE="de"
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER="$m"
|
||||
OCO_PROMPT_MODULE="@commitlint"
|
||||
OCO_AI_PROVIDER="ollama"
|
||||
OCO_GITPUSH="false"
|
||||
OCO_ONE_LINE_COMMIT="true"
|
||||
`
|
||||
);
|
||||
const config = getConfig({ configPath: '', envPath: envFile.filePath });
|
||||
|
||||
expect(config).not.toEqual(null);
|
||||
expect(config!['OCO_OPENAI_API_KEY']).toEqual('sk-key');
|
||||
expect(config!['OCO_ANTHROPIC_API_KEY']).toEqual('secret-key');
|
||||
expect(config!['OCO_TOKENS_MAX_INPUT']).toEqual(8192);
|
||||
expect(config!['OCO_TOKENS_MAX_OUTPUT']).toEqual(1000);
|
||||
expect(config!['OCO_OPENAI_BASE_PATH']).toEqual('/openai/api');
|
||||
expect(config!['OCO_DESCRIPTION']).toEqual(true);
|
||||
expect(config!['OCO_EMOJI']).toEqual(true);
|
||||
expect(config!['OCO_MODEL']).toEqual('gpt-4');
|
||||
expect(config!['OCO_LANGUAGE']).toEqual('de');
|
||||
expect(config!['OCO_MESSAGE_TEMPLATE_PLACEHOLDER']).toEqual('$m');
|
||||
expect(config!['OCO_PROMPT_MODULE']).toEqual('@commitlint');
|
||||
expect(config!['OCO_AI_PROVIDER']).toEqual('ollama');
|
||||
expect(config!['OCO_GITPUSH']).toEqual(false);
|
||||
expect(config!['OCO_ONE_LINE_COMMIT']).toEqual(true);
|
||||
|
||||
await envFile.cleanup();
|
||||
});
|
||||
});
|
||||
29
test/unit/utils.ts
Normal file
29
test/unit/utils.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import path from 'path';
|
||||
import { mkdtemp, rm, writeFile } from 'fs';
|
||||
import { promisify } from 'util';
|
||||
import { tmpdir } from 'os';
|
||||
const fsMakeTempDir = promisify(mkdtemp);
|
||||
const fsRemove = promisify(rm);
|
||||
const fsWriteFile = promisify(writeFile);
|
||||
|
||||
/**
|
||||
* Prepare tmp file for the test
|
||||
*/
|
||||
export async function prepareFile(
|
||||
fileName: string,
|
||||
content: string
|
||||
): Promise<{
|
||||
filePath: string;
|
||||
cleanup: () => Promise<void>;
|
||||
}> {
|
||||
const tempDir = await fsMakeTempDir(path.join(tmpdir(), 'opencommit-test-'));
|
||||
const filePath = path.resolve(tempDir, fileName);
|
||||
await fsWriteFile(filePath, content);
|
||||
const cleanup = async () => {
|
||||
return fsRemove(tempDir, { recursive: true });
|
||||
};
|
||||
return {
|
||||
filePath,
|
||||
cleanup
|
||||
};
|
||||
}
|
||||
@@ -21,6 +21,9 @@
|
||||
|
||||
"skipLibCheck": true
|
||||
},
|
||||
"include": [
|
||||
"test/jest-setup.ts"
|
||||
],
|
||||
"exclude": ["node_modules"],
|
||||
"ts-node": {
|
||||
"esm": true,
|
||||
|
||||
Reference in New Issue
Block a user