Compare commits


3 Commits

Author       SHA1        Message           Date
di-sukharev  1b25aabbb8  configure jest    2023-09-03 17:12:22 +08:00
di-sukharev  e6a145841c  add stupid tests  2023-09-03 17:12:17 +08:00
di-sukharev  99975c154e  improve typing    2023-09-03 17:12:09 +08:00
21 changed files with 7139 additions and 2979 deletions

28
.github/workflows/stale.yml vendored Normal file

@@ -0,0 +1,28 @@
# This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
#
# You can adjust the behavior by modifying this file.
# For more information, see:
# https://github.com/actions/stale
name: Mark stale issues and pull requests

on:
  schedule:
    - cron: '27 21 * * *'

jobs:
  stale:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 40
          stale-issue-message: 'Stale issue message'
          stale-pr-message: 'Stale pull request message'
          stale-issue-label: 'no-issue-activity'
          stale-pr-label: 'no-pr-activity'
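
A note on the values above: the cron expression `'27 21 * * *'` runs the job once a day at 21:27 UTC, and since only `days-before-stale: 40` is set, `actions/stale` falls back to its documented default for `days-before-close` (7 days after an item is marked stale).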

README.md

@@ -2,12 +2,13 @@
<div>
<img src=".github/logo-grad.svg" alt="OpenCommit logo"/>
<h1 align="center">OpenCommit</h1>
<h4 align="center">Follow the bird <a href="https://twitter.com/_sukharev_"><img src="https://img.shields.io/twitter/follow/_sukharev_?style=flat&label=_sukharev_&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
<h4 align="center">Follow the bird <a href="https://twitter.com/io_Y_oi"><img src="https://img.shields.io/twitter/follow/io_Y_oi?style=flat&label=io_Y_oi&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
</div>
<h2>Auto-generate meaningful commits in 1 second</h2>
<p>Killing lame commits with AI 🤯🔫</p>
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
<h4 align="center">🪩 Winner of <a href="https://twitter.com/_sukharev_/status/1683448136973582336">GitHub 2023 hackathon</a> 🪩</h4>
<h4 align="center">🪩 Winner of GitHub 2023 HACKATHON <a href="https://twitter.com/io_Y_oi/status/1683448136973582336"><img style="width:14px; height:14px; margin-top: -4px" src=".github/github-mark-white.png" align="center"></a>
</h4>
</div>
---
@@ -28,10 +29,6 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```
Alternatively, run it via `npx opencommit` or `bunx opencommit`.
macOS may ask you to run the command with `sudo` when installing a package globally.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
3. Set the key to OpenCommit config:
@@ -58,17 +55,6 @@ git add <files...>
oco
```
You can also run it with a local model through Ollama:
- install and start Ollama
- run `ollama run mistral` (do this only once, to pull the model)
- run (in your project directory):
```sh
git add <files...>
AI_PROVIDER='ollama' opencommit
```
## Configuration
### Local per repo configuration
@@ -125,12 +111,6 @@ or, as a cheaper option:
oco config set OCO_MODEL=gpt-3.5-turbo
```
or for GPT-4 Turbo (Preview), which is more capable, has knowledge of world events up to April 2023, offers a 128k context window, and is 2-3x cheaper than GPT-4:
```sh
oco config set OCO_MODEL=gpt-4-1106-preview
```
Make sure that you spell it `gpt-4` (lowercase) and that you actually have API access to GPT-4. Having ChatGPT Plus doesn't necessarily mean you have API access to GPT-4.
### Locale configuration
@@ -350,6 +330,4 @@ You pay for your requests to OpenAI API on your own.
OpenCommit stores your key locally.
OpenCommit uses the gpt-3.5-turbo-16k model by default; it should not exceed $0.10 per casual working day.
You may switch to gpt-4; it's better, but more expensive.
OpenCommit by default uses the official ChatGPT (gpt-3.5-turbo-16k) model, which is a lot cheaper than gpt-4.
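
One inconsistency worth flagging in the Ollama instructions above: the README snippet sets `AI_PROVIDER='ollama'`, while everything else in this compare reads `OCO_AI_PROVIDER` (the `ollama:start` script in package.json and the provider dispatch in `src/utils/engine.ts`):

```ts
// Provider dispatch as written in src/utils/engine.ts within this compare:
if (config?.OCO_AI_PROVIDER == 'ollama') {
  return ollamaAi;
}
```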

12
__tests__/cli.test.ts Normal file

@@ -0,0 +1,12 @@
// @ts-ignore
// import { jest } from '@jest/globals';
import { generateCommitMessageByDiff } from '../src/generateCommitMessageFromGitDiff';

test.skip('generateCommitMessageFromGitDiff', async () => {
  const GIT_DIFF = ``;
  const res = await generateCommitMessageByDiff(GIT_DIFF);
  expect(res).toBe('lol');
});

__tests__/… (new test file; name not shown in this view)

@@ -0,0 +1,79 @@
import { getCommitMsgsPromisesFromFileDiffs } from '../src/generateCommitMessageFromGitDiff';
const oneFileThreeChanges = `diff --git a/example.txt b/example.txt
index e69de29..3f6a3fa 100644
--- a/example.txt
+++ b/example.txt
@@ -1,2 +1,2 @@
-Hello, World!
+Hello, everyone!
This is an example file.
@@ -4,2 +4,2 @@
-Goodbye, World!
+Goodbye, everyone!
Have a great day!
@@ -7,2 +7,2 @@
-It's a sunny day!
+It's a rainy day!
Let's go for a walk.`;
const fourFilesOneChangeEach = `diff --git a/file1.txt b/file1.txt
index e69de29..3f6a3fa 100644
--- a/file1.txt
+++ b/file1.txt
@@ -1,2 +1,2 @@
-Hello, World!
+Hello, everyone!
This is file 1.
diff --git a/file2.txt b/file2.txt
index 87c0ddc..d7b182e 100644
--- a/file2.txt
+++ b/file2.txt
@@ -1,3 +1,3 @@
This is file 2.
-Goodbye, World!
+Goodbye, everyone!
Have a great day!
diff --git a/file3.txt b/file3.txt
index e69de29..3f6a3fa 100644
--- a/file3.txt
+++ b/file3.txt
@@ -1,4 +1,4 @@
This is file 3.
-It's a sunny day!
+It's a rainy day!
Let's go for a walk.
diff --git a/file4.txt b/file4.txt
index 3f6a3fa..87c0ddc 100644
--- a/file4.txt
+++ b/file4.txt
@@ -1,5 +1,5 @@
This is file 4.
-It's time to sleep.
+It's time to wake up.
Goodnight.
`;
test('1', async () => {
  const MAX_LENGTH = 50;
  const oneFile3Changes = await getCommitMsgsPromisesFromFileDiffs(
    oneFileThreeChanges,
    MAX_LENGTH
  );
  expect(oneFile3Changes).toBe('lol');
});

test('2', async () => {
  const MAX_LENGTH = 50;
  const fourFilesOneChange = await getCommitMsgsPromisesFromFileDiffs(
    fourFilesOneChangeEach,
    MAX_LENGTH
  );
  expect(fourFilesOneChange).toBe('lol');
});
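
Both assertions above compare the result to the string `'lol'`, but the call sites later in this compare (`commitMessagePromises.push(...)` in `src/generateCommitMessageFromGitDiff.ts`) treat the return value as an array of promises. A more realistic structural assertion, assuming one message promise per file diff, might look like this:

```ts
// Sketch only: assumes getCommitMsgsPromisesFromFileDiffs yields one
// promise per file diff. Resolving the promises would call the OpenAI
// API, so a real test would stub the engine first.
test('splits four file diffs into four message promises', async () => {
  const promises = await getCommitMsgsPromisesFromFileDiffs(
    fourFilesOneChangeEach,
    50 // MAX_LENGTH, as in the tests above
  );
  expect(promises).toHaveLength(4);
});
```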

21
jest.config.ts Normal file

@@ -0,0 +1,21 @@
import type { JestConfigWithTsJest } from 'ts-jest';

const jestConfig: JestConfigWithTsJest = {
  // [...]
  extensionsToTreatAsEsm: ['.ts'],
  moduleNameMapper: {
    '^(\\.{1,2}/.*)\\.js$': '$1'
  },
  transform: {
    // '^.+\\.[tj]sx?$' to process js/ts with `ts-jest`
    // '^.+\\.m?[tj]sx?$' to process js/ts/mjs/mts with `ts-jest`
    '^.+\\.tsx?$': [
      'ts-jest',
      {
        useESM: true
      }
    ]
  }
};

export default jestConfig;
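
A note on this config: `extensionsToTreatAsEsm` plus the `moduleNameMapper` entry let TypeScript sources keep ESM-style relative imports ending in `.js` while Jest maps them back to the `.ts` files, and `useESM: true` pairs with the `NODE_OPTIONS=--experimental-vm-modules` test script added to package.json below. A minimal sketch of a spec that relies on the mapping (`tokenCount`'s exact signature is an assumption here):

```ts
// __tests__/tokenCount.test.ts — sketch; the `.js` suffix is what the
// moduleNameMapper rewrites to ../src/utils/tokenCount.ts
import { tokenCount } from '../src/utils/tokenCount.js';

test('counts tokens in a short string', () => {
  // Assumption: tokenCount(text: string) returns the number of tokens.
  expect(tokenCount('hello world')).toBeGreaterThan(0);
});
```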

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

4282
package-lock.json generated

File diff suppressed because it is too large

package.json

@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.0.8",
"version": "3.0.0",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -12,8 +12,7 @@
"aicommit",
"aicommits",
"gptcommit",
"commit",
"ollama"
"commit"
],
"main": "cli.js",
"bin": {
@@ -41,25 +40,28 @@
"scripts": {
"watch": "npm run -S build -- --sourcemap --watch",
"start": "node ./out/cli.cjs",
"ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
"dev": "ts-node ./src/cli.ts",
"build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
"deploy": "npm version patch && npm run build:push && git push --tags && npm publish --tag latest",
"deploy": "npm run build:push && npm version patch && git push --tags && npm publish --tag latest",
"lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src"
"format": "prettier --write src",
"test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose --coverage --config jest.config.ts"
},
"devDependencies": {
"@commitlint/types": "^17.4.4",
"@types/ini": "^1.3.31",
"@types/inquirer": "^9.0.3",
"@types/jest": "^29.5.4",
"@types/node": "^16.18.14",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"dotenv": "^16.0.3",
"esbuild": "^0.15.18",
"eslint": "^8.28.0",
"jest": "^29.6.4",
"prettier": "^2.8.4",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"typescript": "^4.9.3"
},
@@ -71,7 +73,6 @@
"@dqbd/tiktoken": "^1.0.2",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",

src/api.ts

@@ -13,27 +13,23 @@ import {
CONFIG_MODES,
DEFAULT_MODEL_TOKEN_LIMIT,
getConfig
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
} from './commands/config';
import { GenerateCommitMessageErrorEnum } from './generateCommitMessageFromGitDiff';
import { tokenCount } from './utils/tokenCount';
const config = getConfig();
let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
let basePath = config?.OCO_OPENAI_BASE_PATH;
let apiKey = config?.OCO_OPENAI_API_KEY
let apiKey = config?.OCO_OPENAI_API_KEY;
const [command, mode] = process.argv.slice(2);
const isLocalModel = config?.OCO_AI_PROVIDER == 'ollama'
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalModel) {
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
intro('opencommit');
outro(
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token> . If you are using GPT, make sure you add payment details, so API works.`'
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
@@ -44,7 +40,7 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalMode
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
class OpenAi implements AiEngine {
class OpenAi {
private openAiApiConfiguration = new OpenAiApiConfiguration({
apiKey: apiKey
});
@@ -104,6 +100,16 @@ class OpenAi implements AiEngine {
};
}
export const getOpenCommitLatestVersion = async (): Promise<
  string | undefined
> => {
  try {
    const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
    return stdout;
  } catch (_) {
    outro('Error while getting the latest version of opencommit');
    return undefined;
  }
};
export const api = new OpenAi();
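
The `getOpenCommitLatestVersion` helper added to this file shells out to `npm view opencommit version` via `execa`. Its consumer shows up in `src/utils/checkIsLatestVersion.ts` further down; a condensed sketch of that flow, where everything past the first line of the function body is assumed from the names:

```ts
// Sketch of the consumer (names from the diffs in this compare);
// the comparison/notification details are assumptions.
import currentPackage from '../../package.json';
import { getOpenCommitLatestVersion } from '../api';

export const checkIsLatestVersion = async () => {
  const latestVersion = await getOpenCommitLatestVersion();
  if (latestVersion && latestVersion !== currentPackage.version) {
    console.warn(
      `opencommit ${latestVersion} is available (installed: ${currentPackage.version}).`
    );
  }
};
```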

src/commands/commit.ts

@@ -199,17 +199,17 @@ export async function commit(
}
if (stagedFiles.length === 0 && changedFiles.length > 0) {
const files = (await multiselect({
const files = await multiselect({
message: chalk.cyan('Select the files you want to add to the commit:'),
options: changedFiles.map((file) => ({
value: file,
label: file
}))
})) as string[];
});
if (isCancel(files)) process.exit(1);
await gitAdd({ files });
await gitAdd({ files: files as string[] });
}
await commit(extraArgs, false);

src/commands/config.ts

@@ -22,8 +22,7 @@ export enum CONFIG_KEYS {
OCO_MODEL = 'OCO_MODEL',
OCO_LANGUAGE = 'OCO_LANGUAGE',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE'
}
export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;
@@ -48,9 +47,8 @@ const validateConfig = (
};
export const configValidators = {
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
//need api key unless running locally with ollama
validateConfig('API_KEY', value || config.OCO_AI_PROVIDER == 'ollama', 'You need to provide an API key');
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config?: any) {
validateConfig(CONFIG_KEYS.OCO_OPENAI_API_KEY, value, 'Cannot be empty');
validateConfig(
CONFIG_KEYS.OCO_OPENAI_API_KEY,
value.startsWith('sk-'),
@@ -129,10 +127,9 @@ export const configValidators = {
'gpt-3.5-turbo',
'gpt-4',
'gpt-3.5-turbo-16k',
'gpt-3.5-turbo-0613',
'gpt-4-1106-preview'
'gpt-3.5-turbo-0613'
].includes(value),
`${value} is not supported yet, use 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613', 'gpt-3.5-turbo' or 'gpt-4-1106-preview'`
`${value} is not supported yet, use 'gpt-4', 'gpt-3.5-turbo-16k' (default), 'gpt-3.5-turbo-0613' or 'gpt-3.5-turbo'`
);
return value;
},
@@ -153,20 +150,7 @@ export const configValidators = {
);
return value;
},
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AI_PROVIDER,
[
'',
'openai',
'ollama'
].includes(value),
`${value} is not supported yet, use 'ollama' or 'openai' (default)`
);
return value;
},
}
};
export type ConfigType = {
@@ -188,8 +172,7 @@ export const getConfig = (): ConfigType | null => {
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit',
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai'
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit'
};
const configExists = existsSync(configPath);
@@ -215,7 +198,9 @@ export const getConfig = (): ConfigType | null => {
config[configKey] = validValue;
} catch (error) {
outro(`Unknown '${configKey}' config option.`);
outro(
`'${configKey}' name is invalid, it should be either 'OCO_${configKey.toUpperCase()}' or it doesn't exist.`
);
outro(
`Manually fix the '.env' file or global '~/.opencommit' config file.`
);
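
Each validator above receives the raw value (and optionally the whole config), asserts a condition through `validateConfig`, and returns the value when it passes. A sketch of the path behind `oco config set OCO_OPENAI_API_KEY=...`, assuming `validateConfig` aborts on a falsy condition as its error messages suggest:

```ts
// Accepted: non-empty and prefixed with 'sk-'; rejected otherwise.
const validated = configValidators[CONFIG_KEYS.OCO_OPENAI_API_KEY]('sk-example');
```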

src/engine/Engine.ts

@@ -1,7 +0,0 @@
import { ChatCompletionRequestMessage } from 'openai';

export interface AiEngine {
  generateCommitMessage(
    messages: Array<ChatCompletionRequestMessage>
  ): Promise<string | undefined>;
}

src/engine/ollama.ts

@@ -1,36 +0,0 @@
import axios, { AxiosError } from 'axios';
import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';

export class OllamaAi implements AiEngine {
  async generateCommitMessage(
    messages: Array<ChatCompletionRequestMessage>
  ): Promise<string | undefined> {
    const model = 'mistral'; // todo: allow other models

    let prompt = messages.map((x) => x.content).join('\n');
    // hotfix: local models are not as clever, so the prompt is tweaked a bit
    prompt += 'Summarize above git diff in 10 words or less';

    const url = 'http://localhost:11434/api/generate';
    const p = {
      model,
      prompt,
      stream: false
    };
    try {
      const response = await axios.post(url, p, {
        headers: {
          'Content-Type': 'application/json'
        }
      });

      const answer = response.data?.response;
      return answer;
    } catch (err: any) {
      const message = err.response?.data?.error ?? err.message;
      throw new Error('local model issues. details: ' + message);
    }
  }
}

export const ollamaAi = new OllamaAi();
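
A usage sketch for this engine, assuming a local Ollama server on its default port (11434) with the `mistral` model already pulled via `ollama run mistral`:

```ts
import { ollamaAi } from './engine/ollama';

// generateCommitMessage flattens the message contents into a single prompt
// and POSTs { model, prompt, stream: false } to /api/generate; the reply's
// `response` field carries the generated text.
const message = await ollamaAi.generateCommitMessage([
  { role: 'user', content: 'diff --git a/a.txt b/a.txt\n-foo\n+bar' }
]);
console.log(message);
```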

src/generateCommitMessageFromGitDiff.ts

@@ -3,11 +3,11 @@ import {
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { api } from './api';
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
import { getMainCommitPrompt } from './prompts';
import { mergeDiffs } from './utils/mergeDiffs';
import { tokenCount } from './utils/tokenCount';
import { getEngine } from './utils/engine';
const config = getConfig();
@@ -67,8 +67,7 @@ export const generateCommitMessageByDiff = async (
const messages = await generateCommitMessageChatCompletionPrompt(diff);
const engine = getEngine()
const commitMessage = await engine.generateCommitMessage(messages);
const commitMessage = await api.generateCommitMessage(messages);
if (!commitMessage)
throw new Error(GenerateCommitMessageErrorEnum.emptyMessage);
@@ -105,14 +104,13 @@ function getMessagesPromisesByChangesInFile(
}
}
const engine = getEngine()
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
async (lineDiff) => {
const messages = await generateCommitMessageChatCompletionPrompt(
separator + lineDiff
);
return engine.generateCommitMessage(messages);
return api.generateCommitMessage(messages);
}
);
@@ -179,8 +177,7 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
separator + fileDiff
);
const engine = getEngine()
commitMessagePromises.push(engine.generateCommitMessage(messages));
commitMessagePromises.push(api.generateCommitMessage(messages));
}
}
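
The pattern throughout this file: when a diff exceeds the model's token limit, it is split per file (and per line-chunk within a single large file), and each chunk gets its own commit-message promise. A sketch of consuming those promises, using the names from the hunks above:

```ts
// getCommitMsgsPromisesFromFileDiffs is async and resolves to an array of
// promises, one commit message per file diff.
const promises = await getCommitMsgsPromisesFromFileDiffs(
  diff,
  DEFAULT_MODEL_TOKEN_LIMIT
);
const commitMessages = await Promise.all(promises);
```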

src/modules/commitlint/config.ts

@@ -1,5 +1,6 @@
import { spinner } from '@clack/prompts';
import { api } from '../../api';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
@@ -8,7 +9,6 @@ import { commitlintPrompts, inferPromptsFromCommitlintConfig } from './prompts';
import { getCommitLintPWDConfig } from './pwd-commitlint';
import { CommitlintLLMConfig } from './types';
import * as utils from './utils';
import { getEngine } from '../../utils/engine';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
@@ -55,9 +55,8 @@ export const configureCommitlintIntegration = async (force = false) => {
// consistencyPrompts.map((p) => p.content)
// );
const engine = getEngine()
let consistency =
(await engine.generateCommitMessage(consistencyPrompts)) || '{}';
(await api.generateCommitMessage(consistencyPrompts)) || '{}';
// Clean up the consistency answer. Sometimes 'gpt-3.5-turbo' sends rules back.
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));

src/modules/commitlint/pwd-commitlint.ts

@@ -1,7 +1,7 @@
import path from 'path';
const nodeModulesPath = path.join(
process.env.PWD || process.cwd(),
process.env.PWD as string,
'node_modules',
'@commitlint',
'load'
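
Of the two variants shown, `process.env.PWD || process.cwd()` is the more portable spelling: POSIX shells export `PWD`, but Windows shells generally don't, so the `as string` cast can hide an `undefined` at runtime while the fallback always yields a usable base directory:

```ts
import path from 'path';

// Safe even on shells that don't export PWD (e.g. cmd.exe / PowerShell):
const nodeModulesPath = path.join(
  process.env.PWD || process.cwd(),
  'node_modules',
  '@commitlint',
  'load'
);
```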

src/utils/checkIsLatestVersion.ts

@@ -3,7 +3,7 @@ import chalk from 'chalk';
import { outro } from '@clack/prompts';
import currentPackage from '../../package.json';
import { getOpenCommitLatestVersion } from '../version';
import { getOpenCommitLatestVersion } from '../api';
export const checkIsLatestVersion = async () => {
const latestVersion = await getOpenCommitLatestVersion();

src/utils/engine.ts

@@ -1,13 +0,0 @@
import { AiEngine } from '../engine/Engine';
import { api } from '../engine/openAi';
import { getConfig } from '../commands/config';
import { ollamaAi } from '../engine/ollama';

export function getEngine(): AiEngine {
  const config = getConfig();

  if (config?.OCO_AI_PROVIDER == 'ollama') {
    return ollamaAi;
  }

  // OpenAI GPT by default
  return api;
}
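
Both engines implement the `AiEngine` interface from `src/engine/Engine.ts`, so call sites stay provider-agnostic. The call-site pattern this compare removes from `src/generateCommitMessageFromGitDiff.ts` was:

```ts
const engine = getEngine();
const commitMessage = await engine.generateCommitMessage(messages);
```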

src/version.ts

@@ -1,14 +0,0 @@
import { outro } from '@clack/prompts';
import { execa } from 'execa';

export const getOpenCommitLatestVersion = async (): Promise<
  string | undefined
> => {
  try {
    const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
    return stdout;
  } catch (_) {
    outro('Error while getting the latest version of opencommit');
    return undefined;
  }
};

tsconfig.json

@@ -1,9 +1,9 @@
{
"compilerOptions": {
"target": "ESNext",
"lib": ["ES5", "ES6"],
"module": "ESNext",
"lib": ["ES5", "ES6"],
// "rootDir": "./src",
"resolveJsonModule": true,
"moduleResolution": "node",