Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-01-12 23:28:16 -05:00)

Compare commits: refactorin...oco_find_v (20 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | ca4be719b2 |  |
|  | 1ce357b023 |  |
|  | 45dd07d229 |  |
|  | 5e37fd29b7 |  |
|  | fa164377e4 |  |
|  | 0b89767de0 |  |
|  | 2dded4caa4 |  |
|  | 670f74ebc7 |  |
|  | 89d2aa603b |  |
|  | 8702c17758 |  |
|  | 60597d23eb |  |
|  | 6f04927369 |  |
|  | 0c0cf9c627 |  |
|  | 7286456a04 |  |
|  | 8fe8e614ac |  |
|  | 85468823f9 |  |
|  | 7eb9a1b45c |  |
|  | 825c2fe825 |  |
|  | 1b29f3a9fd |  |
|  | 596dcd7cea |  |
4 .gitignore (vendored)
@@ -11,4 +11,6 @@ uncaughtExceptions.log
src/*.json
.idea
test.ts
notes.md
*.excalidraw
*.tldr
14 README.md
@@ -28,9 +28,7 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```

Alternatively run it via `npx opencommit` or `bunx opencommit`

MacOS may ask to run the command with `sudo` when installing a package globally.
Alternatively run it via `npx opencommit` or `bunx opencommit`, but you need to create ~/.opencommit config file in place.

2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.

@@ -162,6 +160,16 @@ oco config set OCO_EMOJI=false

Other config options are behaving the same.

### Output WHY the changes were done (WIP)

You can set the `OCO_WHY` config to `true` to have OpenCommit output a short description of WHY the changes were done after the commit message. Default is `false`.

To make this perform accurate we must store 'what files do' in some kind of an index or embedding and perform a lookup (kinda RAG) for the accurate git commit message. If you feel like building this comment on this ticket https://github.com/di-sukharev/opencommit/issues/398 and let's go from there together.

```sh
oco config set OCO_WHY=true
```

### Switch to GPT-4 or other models

By default, OpenCommit uses `gpt-4o-mini` model.
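
The README note above says that when OpenCommit is run via `npx opencommit` or `bunx opencommit`, a `~/.opencommit` config file must already exist. Going by the bundled config code further down in this diff (the global config is written as ini by `initGlobalConfig`, and keys such as `OCO_OPENAI_API_KEY` and `OCO_MODEL` are read from it), a minimal sketch for creating that file could look like the following; the values are placeholders, not real settings:

```ts
// Sketch only: write a minimal ~/.opencommit in the ini format the CLI reads.
// Key names come from the diff below; the values here are placeholders.
import { writeFileSync } from 'fs';
import { homedir } from 'os';
import { join } from 'path';
import { stringify } from 'ini';

const configPath = join(homedir(), '.opencommit');

writeFileSync(
  configPath,
  stringify({
    OCO_OPENAI_API_KEY: '<your OpenAI key>', // placeholder
    OCO_MODEL: 'gpt-4o-mini',
    OCO_LANGUAGE: 'en'
  }),
  'utf8'
);
```
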
558 out/cli.cjs
@@ -25193,7 +25193,7 @@ function G3(t2, e3) {
// package.json
var package_default = {
name: "opencommit",
version: "3.0.20",
version: "3.1.1",
description: "Auto-generate impressive commits in 1 second. Killing lame commits with AI \u{1F92F}\u{1F52B}",
keywords: [
"git",
@@ -27752,6 +27752,7 @@ var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
CONFIG_KEYS2["OCO_WHY"] = "OCO_WHY";
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
@@ -28029,44 +28030,25 @@ var configValidators = {
|
||||
};
|
||||
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
|
||||
var defaultEnvPath = (0, import_path.resolve)(process.cwd(), ".env");
|
||||
var assertConfigsAreValid = (config7) => {
|
||||
for (const [key, value] of Object.entries(config7)) {
|
||||
if (!value)
|
||||
continue;
|
||||
if (typeof value === "string" && ["null", "undefined"].includes(value)) {
|
||||
config7[key] = void 0;
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const validate = configValidators[key];
|
||||
validate(value, config7);
|
||||
} catch (error) {
|
||||
ce(`Unknown '${key}' config option or missing validator.`);
|
||||
ce(
|
||||
`Manually fix the '.env' file or global '~/.opencommit' config file.`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
var DEFAULT_CONFIG = {
|
||||
OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */,
|
||||
OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */,
|
||||
OCO_DESCRIPTION: false,
|
||||
OCO_EMOJI: false,
|
||||
OCO_MODEL: getDefaultModel("openai"),
|
||||
OCO_LANGUAGE: "en",
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg",
|
||||
OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */,
|
||||
OCO_AI_PROVIDER: "openai" /* OPENAI */,
|
||||
OCO_ONE_LINE_COMMIT: false,
|
||||
OCO_TEST_MOCK_TYPE: "commit-message",
|
||||
OCO_FLOWISE_ENDPOINT: ":",
|
||||
OCO_WHY: false,
|
||||
OCO_GITPUSH: true
|
||||
};
|
||||
var initGlobalConfig = () => {
|
||||
const defaultConfig = {
|
||||
OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */,
|
||||
OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */,
|
||||
OCO_DESCRIPTION: false,
|
||||
OCO_EMOJI: false,
|
||||
OCO_MODEL: getDefaultModel("openai"),
|
||||
OCO_LANGUAGE: "en",
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg",
|
||||
OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */,
|
||||
OCO_AI_PROVIDER: "openai" /* OPENAI */,
|
||||
OCO_ONE_LINE_COMMIT: false,
|
||||
OCO_TEST_MOCK_TYPE: "commit-message",
|
||||
OCO_FLOWISE_ENDPOINT: ":",
|
||||
OCO_GITPUSH: true
|
||||
};
|
||||
(0, import_fs.writeFileSync)(defaultConfigPath, (0, import_ini.stringify)(defaultConfig), "utf8");
|
||||
return defaultConfig;
|
||||
var initGlobalConfig = (configPath = defaultConfigPath) => {
|
||||
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(DEFAULT_CONFIG), "utf8");
|
||||
return DEFAULT_CONFIG;
|
||||
};
|
||||
var parseEnvVarValue = (value) => {
|
||||
try {
|
||||
@@ -28075,12 +28057,9 @@ var parseEnvVarValue = (value) => {
|
||||
return value;
|
||||
}
|
||||
};
|
||||
var getConfig = ({
|
||||
configPath = defaultConfigPath,
|
||||
envPath = defaultEnvPath
|
||||
} = {}) => {
|
||||
var getEnvConfig = (envPath) => {
|
||||
dotenv.config({ path: envPath });
|
||||
const envConfig = {
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
@@ -28104,23 +28083,35 @@ var getConfig = ({
|
||||
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
|
||||
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH)
|
||||
};
|
||||
};
|
||||
var getGlobalConfig = (configPath) => {
|
||||
let globalConfig;
|
||||
const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath);
|
||||
if (!isGlobalConfigFileExist)
|
||||
globalConfig = initGlobalConfig();
|
||||
globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = (0, import_fs.readFileSync)(configPath, "utf8");
|
||||
globalConfig = (0, import_ini.parse)(configFile);
|
||||
}
|
||||
const mergeObjects = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
return acc;
|
||||
}, {});
|
||||
const config7 = mergeObjects(envConfig, globalConfig);
|
||||
return globalConfig;
|
||||
};
|
||||
var mergeConfigs = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
return acc;
|
||||
}, {});
|
||||
var getConfig = ({
|
||||
envPath = defaultEnvPath,
|
||||
globalPath = defaultConfigPath
|
||||
} = {}) => {
|
||||
const envConfig = getEnvConfig(envPath);
|
||||
const globalConfig = getGlobalConfig(globalPath);
|
||||
const config7 = mergeConfigs(envConfig, globalConfig);
|
||||
return config7;
|
||||
};
|
||||
var setConfig = (keyValues, configPath = defaultConfigPath) => {
|
||||
const config7 = getConfig();
|
||||
var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
|
||||
const config7 = getConfig({
|
||||
globalPath: globalConfigPath
|
||||
});
|
||||
for (let [key, value] of keyValues) {
|
||||
if (!configValidators.hasOwnProperty(key)) {
|
||||
const supportedKeys = Object.keys(configValidators).join("\n");
|
||||
@@ -28144,8 +28135,7 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
|
||||
);
|
||||
config7[key] = validValue;
|
||||
}
|
||||
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config7), "utf8");
|
||||
assertConfigsAreValid(config7);
|
||||
(0, import_fs.writeFileSync)(globalConfigPath, (0, import_ini.stringify)(config7), "utf8");
|
||||
ce(`${source_default.green("\u2714")} config successfully set`);
|
||||
};
|
||||
var configCommand = G3(
|
||||
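
The config hunk above splits the old monolithic `getConfig` into `getEnvConfig`, `getGlobalConfig` and `mergeConfigs`, with environment values taking precedence over the global `~/.opencommit` file. A simplified stand-in for that resolution order (dotenv loading, file I/O and validation are omitted on purpose):

```ts
// Simplified sketch of the refactored config flow in the hunk above.
// Not the bundled implementation: getEnvConfig/getGlobalConfig are reduced to plain values.
type Config = Record<string, string | boolean | number | undefined>;

const getEnvConfig = (): Config => ({ OCO_MODEL: process.env.OCO_MODEL });
const getGlobalConfig = (): Config => ({ OCO_MODEL: 'gpt-4o-mini', OCO_GITPUSH: true });

const mergeConfigs = (main: Config, fallback: Config): Config =>
  Object.keys({ ...fallback, ...main }).reduce((acc, key) => {
    acc[key] = main[key] ?? fallback[key]; // env value wins, global file is the fallback
    return acc;
  }, {} as Config);

const getConfig = (): Config => mergeConfigs(getEnvConfig(), getGlobalConfig());

console.log(getConfig().OCO_MODEL); // env OCO_MODEL if set, otherwise 'gpt-4o-mini'
```
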
@@ -37192,42 +37182,42 @@ var OpenAIClient = class {
|
||||
// src/engine/azure.ts
|
||||
var AzureEngine = class {
|
||||
constructor(config7) {
|
||||
this.generateCommitMessage = async (messages) => {
|
||||
try {
|
||||
const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a4, b7) => a4 + b7, 0);
|
||||
if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput) {
|
||||
throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
|
||||
}
|
||||
const data = await this.client.getChatCompletions(
|
||||
this.config.model,
|
||||
messages
|
||||
);
|
||||
const message = data.choices[0].message;
|
||||
if (message?.content === null) {
|
||||
return void 0;
|
||||
}
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
ce(`${source_default.red("\u2716")} ${this.config.model}`);
|
||||
const err = error;
|
||||
ce(`${source_default.red("\u2716")} ${JSON.stringify(error)}`);
|
||||
if (axios_default.isAxiosError(error) && error.response?.status === 401) {
|
||||
const openAiError = error.response.data.error;
|
||||
if (openAiError?.message)
|
||||
ce(openAiError.message);
|
||||
ce(
|
||||
"For help look into README https://github.com/di-sukharev/opencommit#setup"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
this.config = config7;
|
||||
this.client = new OpenAIClient(
|
||||
this.config.baseURL,
|
||||
new AzureKeyCredential(this.config.apiKey)
|
||||
);
|
||||
}
|
||||
async generateCommitMessage(messages) {
|
||||
try {
|
||||
const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a4, b7) => a4 + b7, 0);
|
||||
if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput) {
|
||||
throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
|
||||
}
|
||||
const data = await this.client.getChatCompletions(
|
||||
this.config.model,
|
||||
messages
|
||||
);
|
||||
const message = data.choices[0].message;
|
||||
if (message?.content === null) {
|
||||
return void 0;
|
||||
}
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
ce(`${source_default.red("\u2716")} ${this.config.model}`);
|
||||
const err = error;
|
||||
ce(`${source_default.red("\u2716")} ${JSON.stringify(error)}`);
|
||||
if (axios_default.isAxiosError(error) && error.response?.status === 401) {
|
||||
const openAiError = error.response.data.error;
|
||||
if (openAiError?.message)
|
||||
ce(openAiError.message);
|
||||
ce(
|
||||
"For help look into README https://github.com/di-sukharev/opencommit#setup"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// src/engine/flowise.ts
|
||||
@@ -42400,7 +42390,7 @@ var OpenAiEngine = class {
|
||||
function getEngine() {
|
||||
const config7 = getConfig();
|
||||
const provider = config7.OCO_AI_PROVIDER;
|
||||
const DEFAULT_CONFIG = {
|
||||
const DEFAULT_CONFIG2 = {
|
||||
model: config7.OCO_MODEL,
|
||||
maxTokensOutput: config7.OCO_TOKENS_MAX_OUTPUT,
|
||||
maxTokensInput: config7.OCO_TOKENS_MAX_INPUT,
|
||||
@@ -42409,37 +42399,37 @@ function getEngine() {
|
||||
switch (provider) {
|
||||
case "ollama" /* OLLAMA */:
|
||||
return new OllamaAi({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: "",
|
||||
baseURL: config7.OCO_OLLAMA_API_URL
|
||||
});
|
||||
case "anthropic" /* ANTHROPIC */:
|
||||
return new AnthropicEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config7.OCO_ANTHROPIC_API_KEY
|
||||
});
|
||||
case "test" /* TEST */:
|
||||
return new TestAi(config7.OCO_TEST_MOCK_TYPE);
|
||||
case "gemini" /* GEMINI */:
|
||||
return new Gemini({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config7.OCO_GEMINI_API_KEY,
|
||||
baseURL: config7.OCO_GEMINI_BASE_PATH
|
||||
});
|
||||
case "azure" /* AZURE */:
|
||||
return new AzureEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config7.OCO_AZURE_API_KEY
|
||||
});
|
||||
case "flowise" /* FLOWISE */:
|
||||
return new FlowiseAi({
|
||||
...DEFAULT_CONFIG,
|
||||
baseURL: config7.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
|
||||
...DEFAULT_CONFIG2,
|
||||
baseURL: config7.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG2.baseURL,
|
||||
apiKey: config7.OCO_FLOWISE_API_KEY
|
||||
});
|
||||
default:
|
||||
return new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config7.OCO_OPENAI_API_KEY
|
||||
});
|
||||
}
|
||||
@@ -42580,7 +42570,7 @@ Example Git Diff is to follow:`
|
||||
];
|
||||
var INIT_MAIN_PROMPT = (language, prompts) => ({
|
||||
role: "system",
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${config2.OCO_WHY ? "and WHY the changes were done" : ""}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${config2.OCO_EMOJI ? "Use GitMoji convention to preface the commit." : "Do not preface the commit with anything."}
|
||||
${config2.OCO_DESCRIPTION ? `Add a short description of WHY the changes are done after the commit message. Don't start it with "This commit", just describe the changes.` : "Don't add any descriptions to the commit, only commit message."}
|
||||
Use the present tense. Use ${language} to answer.
|
||||
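
The prompt hunk above now interpolates the `OCO_WHY` flag into the system message instead of hard-coding the "and WHY the changes were done" clause. A reduced sketch of that toggle; the `IDENTITY` string and surrounding wording here are placeholders, not the real prompt text:

```ts
// Reduced sketch of the OCO_WHY toggle shown in the prompt hunk above.
// IDENTITY and the phrasing are placeholders, not the bundled strings.
const IDENTITY = 'You are a git commit message author.';

const buildSystemPrompt = (config: { OCO_WHY: boolean; OCO_EMOJI: boolean }) => ({
  role: 'system' as const,
  content:
    `${IDENTITY} Explain WHAT the changes were` +
    `${config.OCO_WHY ? ' and WHY the changes were done' : ''}. ` +
    (config.OCO_EMOJI
      ? 'Use GitMoji convention to preface the commit.'
      : 'Do not preface the commit with anything.')
});

console.log(buildSystemPrompt({ OCO_WHY: true, OCO_EMOJI: false }).content);
```
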
@@ -42600,12 +42590,23 @@ var commitlintPrompts = {
|
||||
// src/modules/commitlint/pwd-commitlint.ts
|
||||
var import_promises = __toESM(require("fs/promises"), 1);
|
||||
var import_path3 = __toESM(require("path"), 1);
|
||||
var findModulePath = (moduleName) => {
|
||||
const searchPaths = [
|
||||
import_path3.default.join("node_modules", moduleName),
|
||||
import_path3.default.join("node_modules", ".pnpm")
|
||||
];
|
||||
for (const basePath of searchPaths) {
|
||||
try {
|
||||
const resolvedPath = require.resolve(moduleName, { paths: [basePath] });
|
||||
return resolvedPath;
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot find module ${moduleName}`);
|
||||
};
|
||||
var getCommitLintModuleType = async () => {
|
||||
const packageFile = "node_modules/@commitlint/load/package.json";
|
||||
const packageJsonPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
packageFile
|
||||
);
|
||||
const packageFile = "@commitlint/load/package.json";
|
||||
const packageJsonPath = findModulePath(packageFile);
|
||||
const packageJson = JSON.parse(await import_promises.default.readFile(packageJsonPath, "utf8"));
|
||||
if (!packageJson) {
|
||||
throw new Error(`Failed to parse ${packageFile}`);
|
||||
@@ -42613,21 +42614,15 @@ var getCommitLintModuleType = async () => {
|
||||
return packageJson.type === "module" ? "esm" : "cjs";
|
||||
};
|
||||
var getCommitLintPWDConfig = async () => {
|
||||
let load, nodeModulesPath;
|
||||
let load, modulePath;
|
||||
switch (await getCommitLintModuleType()) {
|
||||
case "cjs":
|
||||
nodeModulesPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
"node_modules/@commitlint/load"
|
||||
);
|
||||
load = require(nodeModulesPath).default;
|
||||
modulePath = findModulePath("@commitlint/load");
|
||||
load = require(modulePath).default;
|
||||
break;
|
||||
case "esm":
|
||||
nodeModulesPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
"node_modules/@commitlint/load/lib/load.js"
|
||||
);
|
||||
load = (await import(nodeModulesPath)).default;
|
||||
modulePath = await findModulePath("@commitlint/load/lib/load.js");
|
||||
load = (await import(modulePath)).default;
|
||||
break;
|
||||
}
|
||||
if (load && typeof load === "function") {
|
||||
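
`findModulePath` in the hunk above replaces hard-coded `node_modules` paths with `require.resolve` over a couple of search roots (the plain `node_modules` folder and the pnpm store). A minimal standalone sketch of that resolution strategy:

```ts
// Minimal sketch of the findModulePath strategy from the hunk above: try
// require.resolve against each search root and fall through on failure.
// Assumes a CommonJS context where `require` is available.
import { join } from 'path';

const findModulePath = (moduleName: string): string => {
  const searchPaths = [
    join('node_modules', moduleName),
    join('node_modules', '.pnpm')
  ];
  for (const basePath of searchPaths) {
    try {
      return require.resolve(moduleName, { paths: [basePath] });
    } catch {
      // not found under this root, try the next one
    }
  }
  throw new Error(`Cannot find module ${moduleName}`);
};

// e.g. findModulePath('@commitlint/load') when commitlint is installed locally
```
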
@@ -43065,12 +43060,17 @@ var assertGitRepo = async () => {
|
||||
throw new Error(error);
|
||||
}
|
||||
};
|
||||
var getIgnoredFolders = () => {
|
||||
try {
|
||||
return (0, import_fs3.readFileSync)(".opencommitignore").toString().split("\n");
|
||||
} catch (e3) {
|
||||
return [];
|
||||
}
|
||||
};
|
||||
var getOpenCommitIgnore = () => {
|
||||
const ig = (0, import_ignore.default)();
|
||||
try {
|
||||
ig.add((0, import_fs3.readFileSync)(".opencommitignore").toString().split("\n"));
|
||||
} catch (e3) {
|
||||
}
|
||||
const ignorePatterns = getIgnoredFolders();
|
||||
ig.add(ignorePatterns);
|
||||
return ig;
|
||||
};
|
||||
var getCoreHooksPath = async () => {
|
||||
@@ -43172,8 +43172,8 @@ var generateCommitMessageFromGitDiff = async ({
|
||||
skipCommitConfirmation = false
|
||||
}) => {
|
||||
await assertGitRepo();
|
||||
const commitSpinner = le();
|
||||
commitSpinner.start("Generating the commit message");
|
||||
const commitGenerationSpinner = le();
|
||||
commitGenerationSpinner.start("Generating the commit message");
|
||||
try {
|
||||
let commitMessage = await generateCommitMessageByDiff(
|
||||
diff,
|
||||
@@ -43188,7 +43188,7 @@ var generateCommitMessageFromGitDiff = async ({
|
||||
commitMessage
|
||||
);
|
||||
}
|
||||
commitSpinner.stop("\u{1F4DD} Commit message generated");
|
||||
commitGenerationSpinner.stop("\u{1F4DD} Commit message generated");
|
||||
ce(
|
||||
`Generated commit message:
|
||||
${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014")}
|
||||
@@ -43198,14 +43198,20 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
|
||||
const isCommitConfirmedByUser = skipCommitConfirmation || await Q3({
|
||||
message: "Confirm the commit message?"
|
||||
});
|
||||
if (isCommitConfirmedByUser && !hD2(isCommitConfirmedByUser)) {
|
||||
if (hD2(isCommitConfirmedByUser))
|
||||
process.exit(1);
|
||||
if (isCommitConfirmedByUser) {
|
||||
const committingChangesSpinner = le();
|
||||
committingChangesSpinner.start("Committing the changes");
|
||||
const { stdout } = await execa("git", [
|
||||
"commit",
|
||||
"-m",
|
||||
commitMessage,
|
||||
...extraArgs2
|
||||
]);
|
||||
ce(`${source_default.green("\u2714")} Successfully committed`);
|
||||
committingChangesSpinner.stop(
|
||||
`${source_default.green("\u2714")} Successfully committed`
|
||||
);
|
||||
ce(stdout);
|
||||
const remotes = await getGitRemotes();
|
||||
if (!remotes.length) {
|
||||
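
This hunk and the following ones replace the combined `if (answer && !isCancel(answer))` checks with an explicit early exit on cancellation followed by a plain truthiness check. A reduced sketch of the new pattern, assuming the minified `Q3`/`hD2` helpers correspond to `confirm`/`isCancel` from @clack/prompts (the names the src/commands/commit.ts diff near the bottom uses):

```ts
// Reduced sketch of the cancel-handling pattern introduced in this diff.
// Assumption: the prompt helpers are @clack/prompts' confirm/isCancel.
import { confirm, isCancel } from '@clack/prompts';

const askToPush = async (): Promise<boolean> => {
  const answer = await confirm({ message: 'Do you want to run `git push`?' });

  // New pattern: bail out explicitly on Ctrl+C instead of folding the
  // cancel check into the same condition as the "yes" branch.
  if (isCancel(answer)) process.exit(1);

  return answer; // plain boolean once cancellation is ruled out
};
```
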
@@ -43218,7 +43224,9 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
|
||||
const isPushConfirmedByUser = await Q3({
|
||||
message: "Do you want to run `git push`?"
|
||||
});
|
||||
if (isPushConfirmedByUser && !hD2(isPushConfirmedByUser)) {
|
||||
if (hD2(isPushConfirmedByUser))
|
||||
process.exit(1);
|
||||
if (isPushConfirmedByUser) {
|
||||
const pushSpinner = le();
|
||||
pushSpinner.start(`Running 'git push ${remotes[0]}'`);
|
||||
const { stdout: stdout2 } = await execa("git", [
|
||||
@@ -43240,26 +43248,26 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
|
||||
message: "Choose a remote to push to",
|
||||
options: remotes.map((remote) => ({ value: remote, label: remote }))
|
||||
});
|
||||
if (!hD2(selectedRemote)) {
|
||||
const pushSpinner = le();
|
||||
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
|
||||
const { stdout: stdout2 } = await execa("git", ["push", selectedRemote]);
|
||||
pushSpinner.stop(
|
||||
`${source_default.green(
|
||||
"\u2714"
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
if (stdout2)
|
||||
ce(stdout2);
|
||||
} else
|
||||
ce(`${source_default.gray("\u2716")} process cancelled`);
|
||||
if (hD2(selectedRemote))
|
||||
process.exit(1);
|
||||
const pushSpinner = le();
|
||||
pushSpinner.start(`Running 'git push ${selectedRemote}'`);
|
||||
const { stdout: stdout2 } = await execa("git", ["push", selectedRemote]);
|
||||
pushSpinner.stop(
|
||||
`${source_default.green(
|
||||
"\u2714"
|
||||
)} Successfully pushed all commits to ${selectedRemote}`
|
||||
);
|
||||
if (stdout2)
|
||||
ce(stdout2);
|
||||
}
|
||||
}
|
||||
if (!isCommitConfirmedByUser && !hD2(isCommitConfirmedByUser)) {
|
||||
} else {
|
||||
const regenerateMessage = await Q3({
|
||||
message: "Do you want to regenerate the message?"
|
||||
});
|
||||
if (regenerateMessage && !hD2(isCommitConfirmedByUser)) {
|
||||
if (hD2(regenerateMessage))
|
||||
process.exit(1);
|
||||
if (regenerateMessage) {
|
||||
await generateCommitMessageFromGitDiff({
|
||||
diff,
|
||||
extraArgs: extraArgs2,
|
||||
@@ -43268,7 +43276,7 @@ ${source_default.grey("\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2014\u2
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
commitSpinner.stop("\u{1F4DD} Commit message generated");
|
||||
commitGenerationSpinner.stop("\u{1F4DD} Commit message generated");
|
||||
const err = error;
|
||||
ce(`${source_default.red("\u2716")} ${err?.message || err}`);
|
||||
process.exit(1);
|
||||
@@ -43302,7 +43310,9 @@ async function commit(extraArgs2 = [], isStageAllFlag = false, fullGitMojiSpec =
|
||||
const isStageAllAndCommitConfirmedByUser = await Q3({
|
||||
message: "Do you want to stage all files and generate commit message?"
|
||||
});
|
||||
if (isStageAllAndCommitConfirmedByUser && !hD2(isStageAllAndCommitConfirmedByUser)) {
|
||||
if (hD2(isStageAllAndCommitConfirmedByUser))
|
||||
process.exit(1);
|
||||
if (isStageAllAndCommitConfirmedByUser) {
|
||||
await commit(extraArgs2, true, fullGitMojiSpec);
|
||||
process.exit(1);
|
||||
}
|
||||
@@ -43347,12 +43357,12 @@ var commitlintConfigCommand = G3(
|
||||
parameters: ["<mode>"]
|
||||
},
|
||||
async (argv) => {
|
||||
ae("opencommit \u2014 configure @commitlint");
|
||||
ae("OpenCommit \u2014 configure @commitlint");
|
||||
try {
|
||||
const { mode } = argv._;
|
||||
if (mode === "get" /* get */) {
|
||||
const commitLintConfig = await getCommitlintLLMConfig();
|
||||
ce(commitLintConfig.toString());
|
||||
ce(JSON.stringify(commitLintConfig, null, 2));
|
||||
return;
|
||||
}
|
||||
if (mode === "force" /* force */) {
|
||||
@@ -43475,7 +43485,7 @@ var prepareCommitMessageHook = async (isStageAllFlag = false) => {
|
||||
const staged = await getStagedFiles();
|
||||
if (!staged)
|
||||
return;
|
||||
ae("opencommit");
|
||||
ae("OpenCommit");
|
||||
const config7 = getConfig();
|
||||
if (!config7.OCO_OPENAI_API_KEY && !config7.OCO_ANTHROPIC_API_KEY && !config7.OCO_AZURE_API_KEY) {
|
||||
throw new Error(
|
||||
@@ -43529,13 +43539,281 @@ Current version: ${currentVersion}. Latest version: ${latestVersion}.
|
||||
}
|
||||
};
|
||||
|
||||
// src/commands/find.ts
|
||||
var generateMermaid = async (stdout) => {
|
||||
const config7 = getConfig();
|
||||
const DEFAULT_CONFIG = {
|
||||
model: config7.OCO_MODEL,
|
||||
maxTokensOutput: config7.OCO_TOKENS_MAX_OUTPUT,
|
||||
maxTokensInput: config7.OCO_TOKENS_MAX_INPUT,
|
||||
baseURL: config7.OCO_OPENAI_BASE_PATH
|
||||
};
|
||||
const engine = new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
apiKey: config7.OCO_OPENAI_API_KEY
|
||||
});
|
||||
const diagram = await engine.generateCommitMessage([
|
||||
{
|
||||
role: "system",
|
||||
content: `You are to generate a mermaid diagram from the given function. Strictly answer in this json format: { "mermaid": "<mermaid diagram>" }. Where <mermaid diagram> is a valid mermaid diagram, e.g:
|
||||
graph TD
|
||||
A[Start] --> B[Generate Commit Message]
|
||||
B --> C{Token count >= Max?}
|
||||
C -->|Yes| D[Process file diffs]
|
||||
C -->|No| E[Generate single message]
|
||||
D --> F[Join messages]
|
||||
E --> G[Generate message]
|
||||
F --> H[End]
|
||||
G --> H
|
||||
B --> I{Error occurred?}
|
||||
I -->|Yes| J[Handle error]
|
||||
J --> H
|
||||
I -->|No| H
|
||||
`
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: stdout
|
||||
}
|
||||
]);
|
||||
return JSON.parse(diagram);
|
||||
};
|
||||
function extractFuncName(line) {
|
||||
const regex = /(?:function|export\s+const|const|let|var)?\s*(?:async\s+)?(\w+)\s*(?:=\s*(?:async\s*)?\(|\()/;
|
||||
const match = line.match(regex);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
function extractSingle(lineContent) {
|
||||
const match = lineContent.match(/\s*(?:public\s+)?(?:async\s+)?(\w+)\s*=/);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
function mapLinesToOccurrences(input, step = 3) {
|
||||
const occurrences = [];
|
||||
let single;
|
||||
for (let i3 = 0; i3 < input.length; i3 += step) {
|
||||
if (i3 + 1 >= input.length)
|
||||
break;
|
||||
const [fileName, callerLineNumber, ...callerLineContent] = input[i3].split(/[=:]/);
|
||||
const [, definitionLineNumber, ...definitionLineContent] = input[i3 + 1].split(/[:]/);
|
||||
if (!single)
|
||||
single = extractSingle(definitionLineContent.join(":"));
|
||||
occurrences.push({
|
||||
fileName,
|
||||
context: {
|
||||
number: parseInt(callerLineNumber, 10),
|
||||
content: callerLineContent.join("=").trim()
|
||||
},
|
||||
matches: [
|
||||
{
|
||||
number: parseInt(definitionLineNumber, 10),
|
||||
content: definitionLineContent.join(":").trim()
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
return { occurrences, single };
|
||||
}
|
||||
var findDeclarations = async (query, ignoredFolders) => {
|
||||
const searchQuery = `(async|function|public).*${query.join("[^ \\n]*")}`;
|
||||
ce(`Searching: ${searchQuery}`);
|
||||
const occurrences = await findInFiles({ query: searchQuery, ignoredFolders });
|
||||
if (!occurrences)
|
||||
return null;
|
||||
const declarations = mapLinesToOccurrences(occurrences.split("\n"));
|
||||
return declarations;
|
||||
};
|
||||
var findUsagesByDeclaration = async (declaration, ignoredFolders) => {
|
||||
const searchQuery = `${declaration}\\(.*\\)`;
|
||||
const occurrences = await findInFiles({
|
||||
query: searchQuery,
|
||||
ignoredFolders
|
||||
});
|
||||
if (!occurrences)
|
||||
return null;
|
||||
const usages = mapLinesToOccurrences(
|
||||
occurrences.split("\n").filter(Boolean),
|
||||
2
|
||||
);
|
||||
return usages;
|
||||
};
|
||||
var findInFiles = async ({
|
||||
query,
|
||||
ignoredFolders,
|
||||
grepOptions = []
|
||||
}) => {
|
||||
const withIgnoredFolders = ignoredFolders.length > 0 ? [
|
||||
"--",
|
||||
" ",
|
||||
".",
|
||||
" ",
|
||||
ignoredFolders.map((folder) => `:^${folder}`).join(" ")
|
||||
] : [];
|
||||
const params = [
|
||||
"--no-pager",
|
||||
"grep",
|
||||
"--show-function",
|
||||
"-n",
|
||||
"-i",
|
||||
...grepOptions,
|
||||
"--break",
|
||||
"--color=never",
|
||||
"--threads",
|
||||
"10",
|
||||
"-E",
|
||||
query,
|
||||
...withIgnoredFolders
|
||||
];
|
||||
try {
|
||||
const { stdout } = await execa("git", params);
|
||||
return stdout;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
var generatePermutations = (arr) => {
|
||||
const n2 = arr.length;
|
||||
const result = [];
|
||||
const indices = new Int32Array(n2);
|
||||
const current = new Array(n2);
|
||||
for (let i4 = 0; i4 < n2; i4++) {
|
||||
indices[i4] = i4;
|
||||
current[i4] = arr[i4];
|
||||
}
|
||||
result.push([...current]);
|
||||
let i3 = 1;
|
||||
while (i3 < n2) {
|
||||
if (indices[i3] > 0) {
|
||||
const j4 = indices[i3] % 2 === 1 ? 0 : indices[i3];
|
||||
[current[i3], current[j4]] = [current[j4], current[i3]];
|
||||
result.push([...current]);
|
||||
indices[i3]--;
|
||||
i3 = 1;
|
||||
} else {
|
||||
indices[i3] = i3;
|
||||
i3++;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
};
|
||||
var shuffleQuery = (query) => {
|
||||
return generatePermutations(query);
|
||||
};
|
||||
var findCommand = G3(
|
||||
{
|
||||
name: "find" /* find */,
|
||||
parameters: ["<query...>"]
|
||||
},
|
||||
async (argv) => {
|
||||
const query = argv._;
|
||||
ae(`OpenCommit \u2014 \u{1F526} find`);
|
||||
const ignoredFolders = getIgnoredFolders();
|
||||
const searchSpinner = le();
|
||||
let declarations = await findDeclarations(query, ignoredFolders);
|
||||
ce(`No matches found. Searching semantically similar queries.`);
|
||||
searchSpinner.start(`Searching for matches...`);
|
||||
if (!declarations?.occurrences.length) {
|
||||
const allPossibleQueries = shuffleQuery(query).reverse();
|
||||
for (const possibleQuery of allPossibleQueries) {
|
||||
declarations = await findDeclarations(possibleQuery, ignoredFolders);
|
||||
if (declarations?.occurrences.length)
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!declarations?.occurrences.length) {
|
||||
searchSpinner.stop(`${source_default.red("\u2718")} No function declarations found.`);
|
||||
return process.exit(1);
|
||||
}
|
||||
const usages = await findUsagesByDeclaration(
|
||||
declarations.single,
|
||||
ignoredFolders
|
||||
);
|
||||
searchSpinner.stop(
|
||||
`${source_default.green("\u2714")} Found ${source_default.green(
|
||||
declarations.single
|
||||
)} definition and ${usages?.occurrences.length} usages.`
|
||||
);
|
||||
ie(
|
||||
declarations.occurrences.map(
|
||||
(o3) => o3.matches.map(
|
||||
(m5) => `${o3.fileName}:${m5.number} ${source_default.cyan(
|
||||
"==>"
|
||||
)} ${m5.content.replace(
|
||||
declarations.single,
|
||||
source_default.green(declarations.single)
|
||||
)}`
|
||||
).join("\n")
|
||||
).join("\n"),
|
||||
"\u235C DECLARATIONS \u235C"
|
||||
);
|
||||
ie(
|
||||
usages?.occurrences.map(
|
||||
(o3) => o3.matches.map(
|
||||
(m5) => `${o3.fileName}:${m5.number} ${source_default.cyan(
|
||||
"==>"
|
||||
)} ${m5.content.replace(
|
||||
declarations.single,
|
||||
source_default.green(declarations.single)
|
||||
)}`
|
||||
)
|
||||
).join("\n"),
|
||||
"\u233E USAGES \u233E"
|
||||
);
|
||||
const usage = await ee({
|
||||
message: source_default.cyan("Expand usage:"),
|
||||
options: usages.occurrences.map(
|
||||
(o3) => o3.matches.map((m5) => ({
|
||||
value: { o: o3, m: m5 },
|
||||
label: `${source_default.yellow(`${o3.fileName}:${m5.number}`)} ${source_default.cyan(
|
||||
"==>"
|
||||
)} ${m5.content.replace(
|
||||
declarations.single,
|
||||
source_default.green(declarations.single)
|
||||
)}`,
|
||||
hint: `parent: ${extractFuncName(o3.context.content) ?? "404"}`
|
||||
}))
|
||||
).flat()
|
||||
});
|
||||
if (hD2(usage))
|
||||
process.exit(1);
|
||||
const { stdout } = await execa("git", [
|
||||
"--no-pager",
|
||||
"grep",
|
||||
"--function-context",
|
||||
"--heading",
|
||||
"-E",
|
||||
usage.m.content.replace("(", "\\(").replace(")", "\\)"),
|
||||
usage.o.fileName
|
||||
]);
|
||||
const mermaidSpinner = le();
|
||||
mermaidSpinner.start("Generating mermaid diagram...");
|
||||
const mermaid = await generateMermaid(stdout);
|
||||
mermaidSpinner.stop();
|
||||
if (mermaid)
|
||||
console.log(mermaid.mermaid);
|
||||
else
|
||||
ie("No mermaid diagram found.");
|
||||
const isCommitConfirmedByUser = await Q3({
|
||||
message: "Create Excalidraw file?"
|
||||
});
|
||||
if (isCommitConfirmedByUser)
|
||||
ce("created diagram.excalidraw");
|
||||
else
|
||||
ce("Excalidraw file not created.");
|
||||
}
|
||||
);
|
||||
|
||||
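
The `find` command above retries the search over reordered query words via `shuffleQuery`/`generatePermutations`. As a readability aid, a compact recursive sketch that plays the same role of enumerating every ordering of the query terms (not the bundled implementation):

```ts
// Sketch: a compact recursive permutation generator serving the same purpose
// as the iterative generatePermutations above (all orderings of the query words).
const permutations = <T>(items: T[]): T[][] =>
  items.length <= 1
    ? [items]
    : items.flatMap((item, i) =>
        permutations([...items.slice(0, i), ...items.slice(i + 1)]).map(
          (rest) => [item, ...rest]
        )
      );

// Example: the fallback search retries with reordered query terms.
console.log(permutations(['find', 'usages', 'by'])); // 6 orderings
```
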
// src/cli.ts
|
||||
var extraArgs = process.argv.slice(2);
|
||||
Z2(
|
||||
{
|
||||
version: package_default.version,
|
||||
name: "opencommit",
|
||||
commands: [configCommand, hookCommand, commitlintConfigCommand],
|
||||
commands: [
|
||||
configCommand,
|
||||
hookCommand,
|
||||
commitlintConfigCommand,
|
||||
findCommand
|
||||
],
|
||||
flags: {
|
||||
fgm: Boolean,
|
||||
yes: {
|
||||
|
||||
@@ -46564,6 +46564,7 @@ var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
|
||||
CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
|
||||
CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
|
||||
CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
|
||||
CONFIG_KEYS2["OCO_WHY"] = "OCO_WHY";
|
||||
CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
|
||||
CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
|
||||
CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
|
||||
@@ -46841,44 +46842,25 @@ var configValidators = {
|
||||
};
|
||||
var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
|
||||
var defaultEnvPath = (0, import_path.resolve)(process.cwd(), ".env");
|
||||
var assertConfigsAreValid = (config6) => {
|
||||
for (const [key, value] of Object.entries(config6)) {
|
||||
if (!value)
|
||||
continue;
|
||||
if (typeof value === "string" && ["null", "undefined"].includes(value)) {
|
||||
config6[key] = void 0;
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const validate = configValidators[key];
|
||||
validate(value, config6);
|
||||
} catch (error) {
|
||||
ce(`Unknown '${key}' config option or missing validator.`);
|
||||
ce(
|
||||
`Manually fix the '.env' file or global '~/.opencommit' config file.`
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
var DEFAULT_CONFIG = {
|
||||
OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */,
|
||||
OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */,
|
||||
OCO_DESCRIPTION: false,
|
||||
OCO_EMOJI: false,
|
||||
OCO_MODEL: getDefaultModel("openai"),
|
||||
OCO_LANGUAGE: "en",
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg",
|
||||
OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */,
|
||||
OCO_AI_PROVIDER: "openai" /* OPENAI */,
|
||||
OCO_ONE_LINE_COMMIT: false,
|
||||
OCO_TEST_MOCK_TYPE: "commit-message",
|
||||
OCO_FLOWISE_ENDPOINT: ":",
|
||||
OCO_WHY: false,
|
||||
OCO_GITPUSH: true
|
||||
};
|
||||
var initGlobalConfig = () => {
|
||||
const defaultConfig = {
|
||||
OCO_TOKENS_MAX_INPUT: 40960 /* DEFAULT_MAX_TOKENS_INPUT */,
|
||||
OCO_TOKENS_MAX_OUTPUT: 4096 /* DEFAULT_MAX_TOKENS_OUTPUT */,
|
||||
OCO_DESCRIPTION: false,
|
||||
OCO_EMOJI: false,
|
||||
OCO_MODEL: getDefaultModel("openai"),
|
||||
OCO_LANGUAGE: "en",
|
||||
OCO_MESSAGE_TEMPLATE_PLACEHOLDER: "$msg",
|
||||
OCO_PROMPT_MODULE: "conventional-commit" /* CONVENTIONAL_COMMIT */,
|
||||
OCO_AI_PROVIDER: "openai" /* OPENAI */,
|
||||
OCO_ONE_LINE_COMMIT: false,
|
||||
OCO_TEST_MOCK_TYPE: "commit-message",
|
||||
OCO_FLOWISE_ENDPOINT: ":",
|
||||
OCO_GITPUSH: true
|
||||
};
|
||||
(0, import_fs.writeFileSync)(defaultConfigPath, (0, import_ini.stringify)(defaultConfig), "utf8");
|
||||
return defaultConfig;
|
||||
var initGlobalConfig = (configPath = defaultConfigPath) => {
|
||||
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(DEFAULT_CONFIG), "utf8");
|
||||
return DEFAULT_CONFIG;
|
||||
};
|
||||
var parseEnvVarValue = (value) => {
|
||||
try {
|
||||
@@ -46887,12 +46869,9 @@ var parseEnvVarValue = (value) => {
|
||||
return value;
|
||||
}
|
||||
};
|
||||
var getConfig = ({
|
||||
configPath = defaultConfigPath,
|
||||
envPath = defaultEnvPath
|
||||
} = {}) => {
|
||||
var getEnvConfig = (envPath) => {
|
||||
dotenv.config({ path: envPath });
|
||||
const envConfig = {
|
||||
return {
|
||||
OCO_MODEL: process.env.OCO_MODEL,
|
||||
OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
|
||||
OCO_ANTHROPIC_API_KEY: process.env.OCO_ANTHROPIC_API_KEY,
|
||||
@@ -46916,23 +46895,35 @@ var getConfig = ({
|
||||
OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
|
||||
OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH)
|
||||
};
|
||||
};
|
||||
var getGlobalConfig = (configPath) => {
|
||||
let globalConfig;
|
||||
const isGlobalConfigFileExist = (0, import_fs.existsSync)(configPath);
|
||||
if (!isGlobalConfigFileExist)
|
||||
globalConfig = initGlobalConfig();
|
||||
globalConfig = initGlobalConfig(configPath);
|
||||
else {
|
||||
const configFile = (0, import_fs.readFileSync)(configPath, "utf8");
|
||||
globalConfig = (0, import_ini.parse)(configFile);
|
||||
}
|
||||
const mergeObjects = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
return acc;
|
||||
}, {});
|
||||
const config6 = mergeObjects(envConfig, globalConfig);
|
||||
return globalConfig;
|
||||
};
|
||||
var mergeConfigs = (main, fallback) => Object.keys(CONFIG_KEYS).reduce((acc, key) => {
|
||||
acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
|
||||
return acc;
|
||||
}, {});
|
||||
var getConfig = ({
|
||||
envPath = defaultEnvPath,
|
||||
globalPath = defaultConfigPath
|
||||
} = {}) => {
|
||||
const envConfig = getEnvConfig(envPath);
|
||||
const globalConfig = getGlobalConfig(globalPath);
|
||||
const config6 = mergeConfigs(envConfig, globalConfig);
|
||||
return config6;
|
||||
};
|
||||
var setConfig = (keyValues, configPath = defaultConfigPath) => {
|
||||
const config6 = getConfig();
|
||||
var setConfig = (keyValues, globalConfigPath = defaultConfigPath) => {
|
||||
const config6 = getConfig({
|
||||
globalPath: globalConfigPath
|
||||
});
|
||||
for (let [key, value] of keyValues) {
|
||||
if (!configValidators.hasOwnProperty(key)) {
|
||||
const supportedKeys = Object.keys(configValidators).join("\n");
|
||||
@@ -46956,8 +46947,7 @@ For more help refer to our docs: https://github.com/di-sukharev/opencommit`
|
||||
);
|
||||
config6[key] = validValue;
|
||||
}
|
||||
(0, import_fs.writeFileSync)(configPath, (0, import_ini.stringify)(config6), "utf8");
|
||||
assertConfigsAreValid(config6);
|
||||
(0, import_fs.writeFileSync)(globalConfigPath, (0, import_ini.stringify)(config6), "utf8");
|
||||
ce(`${source_default.green("\u2714")} config successfully set`);
|
||||
};
|
||||
var configCommand = G2(
|
||||
@@ -56004,42 +55994,42 @@ var OpenAIClient = class {
|
||||
// src/engine/azure.ts
|
||||
var AzureEngine = class {
|
||||
constructor(config6) {
|
||||
this.generateCommitMessage = async (messages) => {
|
||||
try {
|
||||
const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a3, b3) => a3 + b3, 0);
|
||||
if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput) {
|
||||
throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
|
||||
}
|
||||
const data = await this.client.getChatCompletions(
|
||||
this.config.model,
|
||||
messages
|
||||
);
|
||||
const message = data.choices[0].message;
|
||||
if (message?.content === null) {
|
||||
return void 0;
|
||||
}
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
ce(`${source_default.red("\u2716")} ${this.config.model}`);
|
||||
const err = error;
|
||||
ce(`${source_default.red("\u2716")} ${JSON.stringify(error)}`);
|
||||
if (axios_default.isAxiosError(error) && error.response?.status === 401) {
|
||||
const openAiError = error.response.data.error;
|
||||
if (openAiError?.message)
|
||||
ce(openAiError.message);
|
||||
ce(
|
||||
"For help look into README https://github.com/di-sukharev/opencommit#setup"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
};
|
||||
this.config = config6;
|
||||
this.client = new OpenAIClient(
|
||||
this.config.baseURL,
|
||||
new AzureKeyCredential(this.config.apiKey)
|
||||
);
|
||||
}
|
||||
async generateCommitMessage(messages) {
|
||||
try {
|
||||
const REQUEST_TOKENS = messages.map((msg) => tokenCount(msg.content) + 4).reduce((a3, b3) => a3 + b3, 0);
|
||||
if (REQUEST_TOKENS > this.config.maxTokensInput - this.config.maxTokensOutput) {
|
||||
throw new Error("TOO_MUCH_TOKENS" /* tooMuchTokens */);
|
||||
}
|
||||
const data = await this.client.getChatCompletions(
|
||||
this.config.model,
|
||||
messages
|
||||
);
|
||||
const message = data.choices[0].message;
|
||||
if (message?.content === null) {
|
||||
return void 0;
|
||||
}
|
||||
return message?.content;
|
||||
} catch (error) {
|
||||
ce(`${source_default.red("\u2716")} ${this.config.model}`);
|
||||
const err = error;
|
||||
ce(`${source_default.red("\u2716")} ${JSON.stringify(error)}`);
|
||||
if (axios_default.isAxiosError(error) && error.response?.status === 401) {
|
||||
const openAiError = error.response.data.error;
|
||||
if (openAiError?.message)
|
||||
ce(openAiError.message);
|
||||
ce(
|
||||
"For help look into README https://github.com/di-sukharev/opencommit#setup"
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// src/engine/flowise.ts
|
||||
@@ -61212,7 +61202,7 @@ var OpenAiEngine = class {
|
||||
function getEngine() {
|
||||
const config6 = getConfig();
|
||||
const provider = config6.OCO_AI_PROVIDER;
|
||||
const DEFAULT_CONFIG = {
|
||||
const DEFAULT_CONFIG2 = {
|
||||
model: config6.OCO_MODEL,
|
||||
maxTokensOutput: config6.OCO_TOKENS_MAX_OUTPUT,
|
||||
maxTokensInput: config6.OCO_TOKENS_MAX_INPUT,
|
||||
@@ -61221,37 +61211,37 @@ function getEngine() {
|
||||
switch (provider) {
|
||||
case "ollama" /* OLLAMA */:
|
||||
return new OllamaAi({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: "",
|
||||
baseURL: config6.OCO_OLLAMA_API_URL
|
||||
});
|
||||
case "anthropic" /* ANTHROPIC */:
|
||||
return new AnthropicEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config6.OCO_ANTHROPIC_API_KEY
|
||||
});
|
||||
case "test" /* TEST */:
|
||||
return new TestAi(config6.OCO_TEST_MOCK_TYPE);
|
||||
case "gemini" /* GEMINI */:
|
||||
return new Gemini({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config6.OCO_GEMINI_API_KEY,
|
||||
baseURL: config6.OCO_GEMINI_BASE_PATH
|
||||
});
|
||||
case "azure" /* AZURE */:
|
||||
return new AzureEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config6.OCO_AZURE_API_KEY
|
||||
});
|
||||
case "flowise" /* FLOWISE */:
|
||||
return new FlowiseAi({
|
||||
...DEFAULT_CONFIG,
|
||||
baseURL: config6.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG.baseURL,
|
||||
...DEFAULT_CONFIG2,
|
||||
baseURL: config6.OCO_FLOWISE_ENDPOINT || DEFAULT_CONFIG2.baseURL,
|
||||
apiKey: config6.OCO_FLOWISE_API_KEY
|
||||
});
|
||||
default:
|
||||
return new OpenAiEngine({
|
||||
...DEFAULT_CONFIG,
|
||||
...DEFAULT_CONFIG2,
|
||||
apiKey: config6.OCO_OPENAI_API_KEY
|
||||
});
|
||||
}
|
||||
@@ -61392,7 +61382,7 @@ Example Git Diff is to follow:`
|
||||
];
|
||||
var INIT_MAIN_PROMPT = (language, prompts) => ({
|
||||
role: "system",
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${config2.OCO_WHY ? "and WHY the changes were done" : ""}. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
|
||||
${config2.OCO_EMOJI ? "Use GitMoji convention to preface the commit." : "Do not preface the commit with anything."}
|
||||
${config2.OCO_DESCRIPTION ? `Add a short description of WHY the changes are done after the commit message. Don't start it with "This commit", just describe the changes.` : "Don't add any descriptions to the commit, only commit message."}
|
||||
Use the present tense. Use ${language} to answer.
|
||||
@@ -61412,12 +61402,23 @@ var commitlintPrompts = {
|
||||
// src/modules/commitlint/pwd-commitlint.ts
|
||||
var import_promises = __toESM(require("fs/promises"), 1);
|
||||
var import_path3 = __toESM(require("path"), 1);
|
||||
var findModulePath = (moduleName) => {
|
||||
const searchPaths = [
|
||||
import_path3.default.join("node_modules", moduleName),
|
||||
import_path3.default.join("node_modules", ".pnpm")
|
||||
];
|
||||
for (const basePath of searchPaths) {
|
||||
try {
|
||||
const resolvedPath = require.resolve(moduleName, { paths: [basePath] });
|
||||
return resolvedPath;
|
||||
} catch {
|
||||
}
|
||||
}
|
||||
throw new Error(`Cannot find module ${moduleName}`);
|
||||
};
|
||||
var getCommitLintModuleType = async () => {
|
||||
const packageFile = "node_modules/@commitlint/load/package.json";
|
||||
const packageJsonPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
packageFile
|
||||
);
|
||||
const packageFile = "@commitlint/load/package.json";
|
||||
const packageJsonPath = findModulePath(packageFile);
|
||||
const packageJson = JSON.parse(await import_promises.default.readFile(packageJsonPath, "utf8"));
|
||||
if (!packageJson) {
|
||||
throw new Error(`Failed to parse ${packageFile}`);
|
||||
@@ -61425,21 +61426,15 @@ var getCommitLintModuleType = async () => {
|
||||
return packageJson.type === "module" ? "esm" : "cjs";
|
||||
};
|
||||
var getCommitLintPWDConfig = async () => {
|
||||
let load, nodeModulesPath;
|
||||
let load, modulePath;
|
||||
switch (await getCommitLintModuleType()) {
|
||||
case "cjs":
|
||||
nodeModulesPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
"node_modules/@commitlint/load"
|
||||
);
|
||||
load = require(nodeModulesPath).default;
|
||||
modulePath = findModulePath("@commitlint/load");
|
||||
load = require(modulePath).default;
|
||||
break;
|
||||
case "esm":
|
||||
nodeModulesPath = import_path3.default.join(
|
||||
process.env.PWD || process.cwd(),
|
||||
"node_modules/@commitlint/load/lib/load.js"
|
||||
);
|
||||
load = (await import(nodeModulesPath)).default;
|
||||
modulePath = await findModulePath("@commitlint/load/lib/load.js");
|
||||
load = (await import(modulePath)).default;
|
||||
break;
|
||||
}
|
||||
if (load && typeof load === "function") {
|
||||
|
||||
340 package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "opencommit",
|
||||
"version": "3.0.20",
|
||||
"version": "3.1.1",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "opencommit",
|
||||
"version": "3.0.20",
|
||||
"version": "3.1.1",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.10.0",
|
||||
@@ -956,38 +956,6 @@
|
||||
"resolved": "https://registry.npmjs.org/@dqbd/tiktoken/-/tiktoken-1.0.13.tgz",
|
||||
"integrity": "sha512-941kjlHjfI97l6NuH/AwuXV4mHuVnRooDcHNSlzi98hz+4ug3wT4gJcWjSwSZHqeGAEn90lC9sFD+8a9d5Jvxg=="
|
||||
},
|
||||
"node_modules/@esbuild/android-arm": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.15.18.tgz",
|
||||
"integrity": "sha512-5GT+kcs2WVGjVs7+boataCkO5Fg0y4kCjzkB5bAip7H4jfnOS3dA6KPiww9W1OEKTKeAcUVhdZGvgI65OXmUnw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@esbuild/linux-loong64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.15.18.tgz",
|
||||
"integrity": "sha512-L4jVKS82XVhw2nvzLg/19ClLWg0y27ulRwuP7lcyL6AbUWB5aPglXY3M21mauDQMDfRLs8cQmeT03r/+X3cZYQ==",
|
||||
"cpu": [
|
||||
"loong64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@eslint-community/eslint-utils": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
|
||||
@@ -3678,54 +3646,6 @@
|
||||
"esbuild-windows-arm64": "0.15.18"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-android-64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.15.18.tgz",
|
||||
"integrity": "sha512-wnpt3OXRhcjfIDSZu9bnzT4/TNTDsOUvip0foZOUBG7QbSt//w3QV4FInVJxNhKc/ErhUxc5z4QjHtMi7/TbgA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-android-arm64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.15.18.tgz",
|
||||
"integrity": "sha512-G4xu89B8FCzav9XU8EjsXacCKSG2FT7wW9J6hOc18soEHJdtWu03L3TQDGf0geNxfLTtxENKBzMSq9LlbjS8OQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-darwin-64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.15.18.tgz",
|
||||
"integrity": "sha512-2WAvs95uPnVJPuYKP0Eqx+Dl/jaYseZEUUT1sjg97TJa4oBtbAKnPnl3b5M9l51/nbx7+QAEtuummJZW0sBEmg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-darwin-arm64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.15.18.tgz",
|
||||
@@ -3742,262 +3662,6 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-freebsd-64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.15.18.tgz",
|
||||
"integrity": "sha512-TT3uBUxkteAjR1QbsmvSsjpKjOX6UkCstr8nMr+q7zi3NuZ1oIpa8U41Y8I8dJH2fJgdC3Dj3CXO5biLQpfdZA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-freebsd-arm64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.15.18.tgz",
|
||||
"integrity": "sha512-R/oVr+X3Tkh+S0+tL41wRMbdWtpWB8hEAMsOXDumSSa6qJR89U0S/PpLXrGF7Wk/JykfpWNokERUpCeHDl47wA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-32": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.15.18.tgz",
|
||||
"integrity": "sha512-lphF3HiCSYtaa9p1DtXndiQEeQDKPl9eN/XNoBf2amEghugNuqXNZA/ZovthNE2aa4EN43WroO0B85xVSjYkbg==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.15.18.tgz",
|
||||
"integrity": "sha512-hNSeP97IviD7oxLKFuii5sDPJ+QHeiFTFLoLm7NZQligur8poNOWGIgpQ7Qf8Balb69hptMZzyOBIPtY09GZYw==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-arm": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.15.18.tgz",
|
||||
"integrity": "sha512-UH779gstRblS4aoS2qpMl3wjg7U0j+ygu3GjIeTonCcN79ZvpPee12Qun3vcdxX+37O5LFxz39XeW2I9bybMVA==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-arm64": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.15.18.tgz",
|
||||
"integrity": "sha512-54qr8kg/6ilcxd+0V3h9rjT4qmjc0CccMVWrjOEM/pEcUzt8X62HfBSeZfT2ECpM7104mk4yfQXkosY8Quptug==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-mips64le": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.15.18.tgz",
|
||||
"integrity": "sha512-Mk6Ppwzzz3YbMl/ZZL2P0q1tnYqh/trYZ1VfNP47C31yT0K8t9s7Z077QrDA/guU60tGNp2GOwCQnp+DYv7bxQ==",
|
||||
"cpu": [
|
||||
"mips64el"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/esbuild-linux-ppc64le": {
|
||||
"version": "0.15.18",
|
||||
"resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.15.18.tgz",
|
||||
"integrity": "sha512-b0XkN4pL9WUulPTa/VKHx2wLCgvIAbgwABGnKMY19WhKZPT+8BxhZdqz6EgkqCLld7X5qiCY2F/bfpUUlnFZ9w==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=12"
|
      }
    },
    "node_modules/esbuild-linux-riscv64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.15.18.tgz",
      "integrity": "sha512-ba2COaoF5wL6VLZWn04k+ACZjZ6NYniMSQStodFKH/Pu6RxzQqzsmjR1t9QC89VYJxBeyVPTaHuBMCejl3O/xg==",
      "cpu": [
        "riscv64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "linux"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-linux-s390x": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.15.18.tgz",
      "integrity": "sha512-VbpGuXEl5FCs1wDVp93O8UIzl3ZrglgnSQ+Hu79g7hZu6te6/YHgVJxCM2SqfIila0J3k0csfnf8VD2W7u2kzQ==",
      "cpu": [
        "s390x"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "linux"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-netbsd-64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.15.18.tgz",
      "integrity": "sha512-98ukeCdvdX7wr1vUYQzKo4kQ0N2p27H7I11maINv73fVEXt2kyh4K4m9f35U1K43Xc2QGXlzAw0K9yoU7JUjOg==",
      "cpu": [
        "x64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "netbsd"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-openbsd-64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.15.18.tgz",
      "integrity": "sha512-yK5NCcH31Uae076AyQAXeJzt/vxIo9+omZRKj1pauhk3ITuADzuOx5N2fdHrAKPxN+zH3w96uFKlY7yIn490xQ==",
      "cpu": [
        "x64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "openbsd"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-sunos-64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.15.18.tgz",
      "integrity": "sha512-On22LLFlBeLNj/YF3FT+cXcyKPEI263nflYlAhz5crxtp3yRG1Ugfr7ITyxmCmjm4vbN/dGrb/B7w7U8yJR9yw==",
      "cpu": [
        "x64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "sunos"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-windows-32": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.15.18.tgz",
      "integrity": "sha512-o+eyLu2MjVny/nt+E0uPnBxYuJHBvho8vWsC2lV61A7wwTWC3jkN2w36jtA+yv1UgYkHRihPuQsL23hsCYGcOQ==",
      "cpu": [
        "ia32"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "win32"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-windows-64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.15.18.tgz",
      "integrity": "sha512-qinug1iTTaIIrCorAUjR0fcBk24fjzEedFYhhispP8Oc7SFvs+XeW3YpAKiKp8dRpizl4YYAhxMjlftAMJiaUw==",
      "cpu": [
        "x64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "win32"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/esbuild-windows-arm64": {
      "version": "0.15.18",
      "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.15.18.tgz",
      "integrity": "sha512-q9bsYzegpZcLziq0zgUi5KqGVtfhjxGbnksaBFYmWLxeV/S1fK4OLdq2DFYnXcLMjlZw2L0jLsk1eGoB522WXQ==",
      "cpu": [
        "arm64"
      ],
      "dev": true,
      "optional": true,
      "os": [
        "win32"
      ],
      "engines": {
        "node": ">=12"
      }
    },
    "node_modules/escalade": {
      "version": "3.1.2",
      "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz",

@@ -1,6 +1,6 @@
{
  "name": "opencommit",
  "version": "3.0.20",
  "version": "3.1.1",
  "description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
  "keywords": [
    "git",

@@ -1,5 +0,0 @@
export enum COMMANDS {
  config = 'config',
  hook = 'hook',
  commitlint = 'commitlint'
}
@@ -9,6 +9,7 @@ import { configCommand } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { findCommand } from './commands/find';

const extraArgs = process.argv.slice(2);

@@ -16,7 +17,12 @@ cli(
  {
    version: packageJSON.version,
    name: 'opencommit',
    commands: [configCommand, hookCommand, commitlintConfigCommand],
    commands: [
      configCommand,
      hookCommand,
      commitlintConfigCommand,
      findCommand
    ],
    flags: {
      fgm: Boolean,
      yes: {

@@ -1,5 +1,6 @@
export enum COMMANDS {
  config = 'config',
  hook = 'hook',
  commitlint = 'commitlint'
  commitlint = 'commitlint',
  find = 'find'
}

@@ -50,8 +50,8 @@ const generateCommitMessageFromGitDiff = async ({
  skipCommitConfirmation = false
}: GenerateCommitMessageFromGitDiffParams): Promise<void> => {
  await assertGitRepo();
  const commitSpinner = spinner();
  commitSpinner.start('Generating the commit message');
  const commitGenerationSpinner = spinner();
  commitGenerationSpinner.start('Generating the commit message');

  try {
    let commitMessage = await generateCommitMessageByDiff(
@@ -73,7 +73,7 @@ const generateCommitMessageFromGitDiff = async ({
      );
    }

    commitSpinner.stop('📝 Commit message generated');
    commitGenerationSpinner.stop('📝 Commit message generated');

    outro(
      `Generated commit message:
@@ -88,15 +88,20 @@ ${chalk.grey('——————————————————')}`
      message: 'Confirm the commit message?'
    }));

    if (isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
    if (isCancel(isCommitConfirmedByUser)) process.exit(1);

    if (isCommitConfirmedByUser) {
      const committingChangesSpinner = spinner();
      committingChangesSpinner.start('Committing the changes');
      const { stdout } = await execa('git', [
        'commit',
        '-m',
        commitMessage,
        ...extraArgs
      ]);

      outro(`${chalk.green('✔')} Successfully committed`);
      committingChangesSpinner.stop(
        `${chalk.green('✔')} Successfully committed`
      );

      outro(stdout);

@@ -113,7 +118,9 @@ ${chalk.grey('——————————————————')}`
        message: 'Do you want to run `git push`?'
      });

      if (isPushConfirmedByUser && !isCancel(isPushConfirmedByUser)) {
      if (isCancel(isPushConfirmedByUser)) process.exit(1);

      if (isPushConfirmedByUser) {
        const pushSpinner = spinner();

        pushSpinner.start(`Running 'git push ${remotes[0]}'`);
@@ -141,28 +148,30 @@ ${chalk.grey('——————————————————')}`
        options: remotes.map((remote) => ({ value: remote, label: remote }))
      })) as string;

      if (!isCancel(selectedRemote)) {
        const pushSpinner = spinner();
      if (isCancel(selectedRemote)) process.exit(1);

        pushSpinner.start(`Running 'git push ${selectedRemote}'`);
      const pushSpinner = spinner();

        const { stdout } = await execa('git', ['push', selectedRemote]);
      pushSpinner.start(`Running 'git push ${selectedRemote}'`);

        pushSpinner.stop(
          `${chalk.green(
            '✔'
          )} Successfully pushed all commits to ${selectedRemote}`
        );
      const { stdout } = await execa('git', ['push', selectedRemote]);

        if (stdout) outro(stdout);
      } else outro(`${chalk.gray('✖')} process cancelled`);
      pushSpinner.stop(
        `${chalk.green(
          '✔'
        )} Successfully pushed all commits to ${selectedRemote}`
      );

      if (stdout) outro(stdout);
    }
  }
  if (!isCommitConfirmedByUser && !isCancel(isCommitConfirmedByUser)) {
  } else {
    const regenerateMessage = await confirm({
      message: 'Do you want to regenerate the message?'
    });
    if (regenerateMessage && !isCancel(isCommitConfirmedByUser)) {

    if (isCancel(regenerateMessage)) process.exit(1);

    if (regenerateMessage) {
      await generateCommitMessageFromGitDiff({
        diff,
        extraArgs,
@@ -171,7 +180,7 @@ ${chalk.grey('——————————————————')}`
    }
  }
} catch (error) {
  commitSpinner.stop('📝 Commit message generated');
  commitGenerationSpinner.stop('📝 Commit message generated');

  const err = error as Error;
  outro(`${chalk.red('✖')} ${err?.message || err}`);
@@ -219,10 +228,9 @@ export async function commit(
    message: 'Do you want to stage all files and generate commit message?'
  });

  if (
    isStageAllAndCommitConfirmedByUser &&
    !isCancel(isStageAllAndCommitConfirmedByUser)
  ) {
  if (isCancel(isStageAllAndCommitConfirmedByUser)) process.exit(1);

  if (isStageAllAndCommitConfirmedByUser) {
    await commit(extraArgs, true, fullGitMojiSpec);
    process.exit(1);
  }
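
The hunks above replace the combined `value && !isCancel(value)` checks with an early `process.exit(1)` as soon as the prompt is cancelled. A minimal standalone sketch of that prompt pattern, assuming a hypothetical `doCommit` stand-in for the real commit step:

```ts
import { confirm, isCancel, outro } from '@clack/prompts';

// Hypothetical stand-in for the real commit step.
const doCommit = async () => outro('committed');

const run = async () => {
  const confirmed = await confirm({ message: 'Confirm the commit message?' });

  // Ctrl+C yields a cancel symbol; bail out before treating the value as a boolean.
  if (isCancel(confirmed)) process.exit(1);

  if (confirmed) await doCommit();
  else outro('skipped');
};

run();
```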

@@ -16,14 +16,14 @@ export const commitlintConfigCommand = command(
    parameters: ['<mode>']
  },
  async (argv) => {
    intro('opencommit — configure @commitlint');
    intro('OpenCommit — configure @commitlint');
    try {
      const { mode } = argv._;

      if (mode === CONFIG_MODES.get) {
        const commitLintConfig = await getCommitlintLLMConfig();

        outro(commitLintConfig.toString());
        outro(JSON.stringify(commitLintConfig, null, 2));

        return;
      }

@@ -23,6 +23,7 @@ export enum CONFIG_KEYS {
  OCO_EMOJI = 'OCO_EMOJI',
  OCO_MODEL = 'OCO_MODEL',
  OCO_LANGUAGE = 'OCO_LANGUAGE',
  OCO_WHY = 'OCO_WHY',
  OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
  OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
  OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
@@ -376,6 +377,7 @@ export type ConfigType = {
  [CONFIG_KEYS.OCO_OPENAI_BASE_PATH]?: string;
  [CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
  [CONFIG_KEYS.OCO_EMOJI]: boolean;
  [CONFIG_KEYS.OCO_WHY]: boolean;
  [CONFIG_KEYS.OCO_MODEL]: string;
  [CONFIG_KEYS.OCO_LANGUAGE]: string;
  [CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER]: string;
@@ -422,25 +424,26 @@ enum OCO_PROMPT_MODULE_ENUM {
  COMMITLINT = '@commitlint'
}

const initGlobalConfig = () => {
  const defaultConfig = {
    OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT,
    OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT,
    OCO_DESCRIPTION: false,
    OCO_EMOJI: false,
    OCO_MODEL: getDefaultModel('openai'),
    OCO_LANGUAGE: 'en',
    OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg',
    OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT,
    OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
    OCO_ONE_LINE_COMMIT: false,
    OCO_TEST_MOCK_TYPE: 'commit-message',
    OCO_FLOWISE_ENDPOINT: ':',
    OCO_GITPUSH: true // todo: deprecate
  };
export const DEFAULT_CONFIG = {
  OCO_TOKENS_MAX_INPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_INPUT,
  OCO_TOKENS_MAX_OUTPUT: DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT,
  OCO_DESCRIPTION: false,
  OCO_EMOJI: false,
  OCO_MODEL: getDefaultModel('openai'),
  OCO_LANGUAGE: 'en',
  OCO_MESSAGE_TEMPLATE_PLACEHOLDER: '$msg',
  OCO_PROMPT_MODULE: OCO_PROMPT_MODULE_ENUM.CONVENTIONAL_COMMIT,
  OCO_AI_PROVIDER: OCO_AI_PROVIDER_ENUM.OPENAI,
  OCO_ONE_LINE_COMMIT: false,
  OCO_TEST_MOCK_TYPE: 'commit-message',
  OCO_FLOWISE_ENDPOINT: ':',
  OCO_WHY: false,
  OCO_GITPUSH: true // todo: deprecate
};

  writeFileSync(defaultConfigPath, iniStringify(defaultConfig), 'utf8');
  return defaultConfig;
const initGlobalConfig = (configPath: string = defaultConfigPath) => {
  writeFileSync(configPath, iniStringify(DEFAULT_CONFIG), 'utf8');
  return DEFAULT_CONFIG;
};

const parseEnvVarValue = (value?: any) => {
@@ -451,16 +454,10 @@ const parseEnvVarValue = (value?: any) => {
  }
};

export const getConfig = ({
  configPath = defaultConfigPath,
  envPath = defaultEnvPath
}: {
  configPath?: string;
  envPath?: string;
} = {}): ConfigType => {
const getEnvConfig = (envPath: string) => {
  dotenv.config({ path: envPath });

  const envConfig = {
  return {
    OCO_MODEL: process.env.OCO_MODEL,

    OCO_OPENAI_API_KEY: process.env.OCO_OPENAI_API_KEY,
@@ -491,33 +488,59 @@ export const getConfig = ({

    OCO_GITPUSH: parseEnvVarValue(process.env.OCO_GITPUSH) // todo: deprecate
  };
};

const getGlobalConfig = (configPath: string) => {
  let globalConfig: ConfigType;

  const isGlobalConfigFileExist = existsSync(configPath);
  if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig();
  if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
  else {
    const configFile = readFileSync(configPath, 'utf8');
    globalConfig = iniParse(configFile) as ConfigType;
  }

  const mergeObjects = (main: Partial<ConfigType>, fallback: ConfigType) =>
    Object.keys(CONFIG_KEYS).reduce((acc, key) => {
      acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);
  return globalConfig;
};

      return acc;
    }, {} as ConfigType);
/**
 * Merges two configs.
 * Env config takes precedence over global ~/.opencommit config file
 * @param main - env config
 * @param fallback - global ~/.opencommit config file
 * @returns merged config
 */
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) =>
  Object.keys(CONFIG_KEYS).reduce((acc, key) => {
    acc[key] = parseEnvVarValue(main[key] ?? fallback[key]);

  // env config takes precedence over global ~/.opencommit config file
  const config = mergeObjects(envConfig, globalConfig);
    return acc;
  }, {} as ConfigType);

interface GetConfigOptions {
  globalPath?: string;
  envPath?: string;
}

export const getConfig = ({
  envPath = defaultEnvPath,
  globalPath = defaultConfigPath
}: GetConfigOptions = {}): ConfigType => {
  const envConfig = getEnvConfig(envPath);
  const globalConfig = getGlobalConfig(globalPath);

  const config = mergeConfigs(envConfig, globalConfig);

  return config;
};

export const setConfig = (
  keyValues: [key: string, value: string][],
  configPath: string = defaultConfigPath
  globalConfigPath: string = defaultConfigPath
) => {
  const config = getConfig();
  const config = getConfig({
    globalPath: globalConfigPath
  });

  for (let [key, value] of keyValues) {
    if (!configValidators.hasOwnProperty(key)) {
@@ -543,9 +566,7 @@ export const setConfig = (
    config[key] = validValue;
  }

  writeFileSync(configPath, iniStringify(config), 'utf8');

  assertConfigsAreValid(config);
  writeFileSync(globalConfigPath, iniStringify(config), 'utf8');

  outro(`${chalk.green('✔')} config successfully set`);
};
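
The refactor above splits `getConfig` into `getEnvConfig`, `getGlobalConfig` and `mergeConfigs`, with the local `.env` taking precedence over `~/.opencommit`. A minimal sketch of the same merge rule, independent of the real `ConfigType` (key list shortened for illustration):

```ts
type PartialConfig = Record<string, string | boolean | number | null | undefined>;

const KEYS = ['OCO_MODEL', 'OCO_LANGUAGE', 'OCO_WHY']; // illustrative subset

// An env value wins when it is defined; otherwise the global file value is used.
const mergeConfigsSketch = (env: PartialConfig, global: PartialConfig): PartialConfig =>
  KEYS.reduce((acc, key) => {
    acc[key] = env[key] ?? global[key];
    return acc;
  }, {} as PartialConfig);

// Example: .env overrides only the language, the rest comes from ~/.opencommit.
console.log(
  mergeConfigsSketch(
    { OCO_LANGUAGE: 'fr' },
    { OCO_MODEL: 'gpt-4o-mini', OCO_LANGUAGE: 'en', OCO_WHY: false }
  )
); // { OCO_MODEL: 'gpt-4o-mini', OCO_LANGUAGE: 'fr', OCO_WHY: false }
```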

src/commands/find.ts (new file, 372 lines)
@@ -0,0 +1,372 @@
import {
  confirm,
  intro,
  isCancel,
  note,
  outro,
  select,
  spinner
} from '@clack/prompts';
import chalk from 'chalk';
import { command } from 'cleye';
import { execa } from 'execa';
import { getIgnoredFolders } from '../utils/git';
import { COMMANDS } from './ENUMS';
import { OpenAiEngine } from '../engine/openAi';
import { getConfig } from './config';

type Occurrence = {
  fileName: string;
  context: {
    number: number;
    content: string;
  };
  matches: {
    number: number;
    content: string;
  }[];
};

/*
TODO:
- [ ] format declarations as file:line => context -> declaration
- [ ] format usages as file:line => context -> usage
- [ ] expand on usage to see it's call hierarchy
- [ ] generate Mermaid diagram
*/

const generateMermaid = async (stdout: string) => {
  const config = getConfig();

  const DEFAULT_CONFIG = {
    model: config.OCO_MODEL!,
    maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
    maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
    baseURL: config.OCO_OPENAI_BASE_PATH!
  };
  const engine = new OpenAiEngine({
    ...DEFAULT_CONFIG,
    apiKey: config.OCO_OPENAI_API_KEY!
  });

  const diagram = await engine.generateCommitMessage([
    {
      role: 'system',
      content: `You are to generate a mermaid diagram from the given function. Strictly answer in this json format: { "mermaid": "<mermaid diagram>" }. Where <mermaid diagram> is a valid mermaid diagram, e.g:
      graph TD
        A[Start] --> B[Generate Commit Message]
        B --> C{Token count >= Max?}
        C -->|Yes| D[Process file diffs]
        C -->|No| E[Generate single message]
        D --> F[Join messages]
        E --> G[Generate message]
        F --> H[End]
        G --> H
        B --> I{Error occurred?}
        I -->|Yes| J[Handle error]
        J --> H
        I -->|No| H
      `
    },
    {
      role: 'user',
      content: stdout
    }
  ]);

  return JSON.parse(diagram as string);
};

export function extractFuncName(line: string) {
  const regex =
    /(?:function|export\s+const|const|let|var)?\s*(?:async\s+)?(\w+)\s*(?:=\s*(?:async\s*)?\(|\()/;
  const match = line.match(regex);
  return match ? match[1] : null;
}

function extractSingle(lineContent: string): string | null {
  const match = lineContent.match(/\s*(?:public\s+)?(?:async\s+)?(\w+)\s*=/);
  return match ? match[1] : null;
}
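
`extractFuncName` and `extractSingle` rely only on regular expressions, so their behaviour is easiest to see on concrete lines. A small sketch, assuming the two helpers above are in scope and with invented sample lines:

```ts
// Sample input lines are assumptions for illustration, not taken from the repository.
extractFuncName('export const generateCommitMessage = async (diff) => {'); // 'generateCommitMessage'
extractFuncName('function tokenCount(text) {');                            // 'tokenCount'
extractFuncName('// no call or declaration here');                         // null

extractSingle('  public generateCommitMessage = async (');                 // 'generateCommitMessage'
```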

function mapLinesToOccurrences(input: string[], step: number = 3) {
  const occurrences: Occurrence[] = [];
  let single;

  for (let i = 0; i < input.length; i += step) {
    if (i + 1 >= input.length) break;

    const [fileName, callerLineNumber, ...callerLineContent] =
      input[i].split(/[=:]/);
    const [, definitionLineNumber, ...definitionLineContent] =
      input[i + 1].split(/[:]/);

    if (!single) single = extractSingle(definitionLineContent.join(':'));

    occurrences.push({
      fileName,
      context: {
        number: parseInt(callerLineNumber, 10),
        content: callerLineContent.join('=').trim()
      },
      matches: [
        {
          number: parseInt(definitionLineNumber, 10),
          content: definitionLineContent.join(':').trim()
        }
      ]
    });
  }

  return { occurrences, single };
}
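
`mapLinesToOccurrences` assumes the shape of `git grep -n --show-function` output: a function-context line (fields separated with `=`) followed by the matching line (fields separated with `:`). A hedged example with made-up paths and line numbers:

```ts
// Assumed output shape; the real lines depend on the repository being searched.
const sampleLines = [
  'src/commands/commit.ts=50=const generateCommitMessageFromGitDiff = async ({',
  'src/commands/commit.ts:62:    let commitMessage = await generateCommitMessageByDiff('
];

const { occurrences } = mapLinesToOccurrences(sampleLines, 2);
// occurrences[0].fileName        -> 'src/commands/commit.ts'
// occurrences[0].context.number  -> 50 (the enclosing function line)
// occurrences[0].matches[0]      -> { number: 62, content: 'let commitMessage = await ...' }
```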

const findDeclarations = async (query: string[], ignoredFolders: string[]) => {
  const searchQuery = `(async|function|public).*${query.join('[^ \\n]*')}`;

  outro(`Searching: ${searchQuery}`);

  const occurrences = await findInFiles({ query: searchQuery, ignoredFolders });

  if (!occurrences) return null;

  const declarations = mapLinesToOccurrences(occurrences.split('\n'));

  return declarations;
};

const findUsagesByDeclaration = async (
  declaration: string,
  ignoredFolders: string[]
) => {
  const searchQuery = `${declaration}\\(.*\\)`;

  const occurrences = await findInFiles({
    query: searchQuery,
    ignoredFolders
    // grepOptions: ['--function-context']
  });

  if (!occurrences) return null;

  const usages = mapLinesToOccurrences(
    occurrences.split('\n').filter(Boolean),
    2
  );

  return usages;
};

const buildCallHierarchy = async (
  query: string[],
  ignoredFolders: string[]
) => {};

const findInFiles = async ({
  query,
  ignoredFolders,
  grepOptions = []
}: {
  query: string;
  ignoredFolders: string[];
  grepOptions?: string[];
}): Promise<string | null> => {
  const withIgnoredFolders =
    ignoredFolders.length > 0
      ? [
          '--',
          ' ',
          '.',
          ' ',
          ignoredFolders.map((folder) => `:^${folder}`).join(' ')
        ]
      : [];

  const params = [
    '--no-pager',
    'grep',
    '--show-function', // show function caller
    '-n',
    '-i',
    ...grepOptions,
    '--break',
    '--color=never',

    // '-C',
    // '1',

    // '--full-name',
    // '--heading',
    '--threads',
    '10',
    '-E',
    query,
    ...withIgnoredFolders
  ];

  try {
    const { stdout } = await execa('git', params);
    return stdout;
  } catch (error) {
    return null;
  }
};
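
The argument list assembled above corresponds roughly to `git --no-pager grep --show-function -n -i --break --color=never --threads 10 -E <query> -- . :^<folder>`, with ignored folders excluded via negative pathspecs. A small sketch of calling the helper directly, assuming it is in scope and that "dist" is an ignored folder:

```ts
(async () => {
  const matches = await findInFiles({
    query: '(async|function|public).*commit',
    ignoredFolders: ['dist']
  });
  console.log(matches?.split('\n').length ?? 0, 'matching lines');
})();
```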

const generatePermutations = (arr: string[]): string[][] => {
  const n = arr.length;
  const result: string[][] = [];
  const indices = new Int32Array(n);

  const current = new Array(n);

  for (let i = 0; i < n; i++) {
    indices[i] = i;
    current[i] = arr[i];
  }
  result.push([...current]);

  let i = 1;
  while (i < n) {
    if (indices[i] > 0) {
      const j = indices[i] % 2 === 1 ? 0 : indices[i];

      [current[i], current[j]] = [current[j], current[i]];
      result.push([...current]);
      indices[i]--;
      i = 1;
    } else {
      indices[i] = i;
      i++;
    }
  }

  return result;
};

const shuffleQuery = (query: string[]): string[][] => {
  return generatePermutations(query);
};
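
`shuffleQuery` is used below to retry the search with reordered keywords when the literal word order finds nothing. A tiny usage sketch for a two-word query, where the reorderings are easy to check by hand:

```ts
console.log(shuffleQuery(['commit', 'generate']));
// -> [['commit', 'generate'], ['generate', 'commit']]
```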

export const findCommand = command(
  {
    name: COMMANDS.find,
    parameters: ['<query...>']
  },
  async (argv) => {
    const query = argv._;

    intro(`OpenCommit — 🔦 find`);
    const ignoredFolders = getIgnoredFolders();

    const searchSpinner = spinner();
    let declarations = await findDeclarations(query, ignoredFolders);

    outro(`No matches found. Searching semantically similar queries.`);

    searchSpinner.start(`Searching for matches...`);

    if (!declarations?.occurrences.length) {
      const allPossibleQueries = shuffleQuery(query).reverse();
      for (const possibleQuery of allPossibleQueries) {
        declarations = await findDeclarations(possibleQuery, ignoredFolders);
        if (declarations?.occurrences.length) break;
      }
    }

    if (!declarations?.occurrences.length) {
      searchSpinner.stop(`${chalk.red('✘')} No function declarations found.`);
      return process.exit(1);
    }

    const usages = await findUsagesByDeclaration(
      declarations.single,
      ignoredFolders
    );

    searchSpinner.stop(
      `${chalk.green('✔')} Found ${chalk.green(
        declarations.single
      )} definition and ${usages?.occurrences.length} usages.`
    );

    note(
      declarations.occurrences
        .map((o) =>
          o.matches
            .map(
              (m) =>
                `${o.fileName}:${m.number} ${chalk.cyan(
                  '==>'
                )} ${m.content.replace(
                  declarations.single,
                  chalk.green(declarations.single)
                )}`
            )
            .join('\n')
        )
        .join('\n'),
      '⍜ DECLARATIONS ⍜'
    );

    note(
      usages?.occurrences
        .map((o) =>
          o.matches.map(
            (m) =>
              `${o.fileName}:${m.number} ${chalk.cyan(
                '==>'
              )} ${m.content.replace(
                declarations.single,
                chalk.green(declarations.single)
              )}`
          )
        )
        .join('\n'),
      '⌾ USAGES ⌾'
    );

    const usage = (await select({
      message: chalk.cyan('Expand usage:'),
      options: usages!.occurrences
        .map((o) =>
          o.matches.map((m) => ({
            value: { o, m },
            label: `${chalk.yellow(`${o.fileName}:${m.number}`)} ${chalk.cyan(
              '==>'
            )} ${m.content.replace(
              declarations.single,
              chalk.green(declarations.single)
            )}`,
            hint: `parent: ${extractFuncName(o.context.content) ?? '404'}`
          }))
        )
        .flat()
    })) as { o: Occurrence; m: any };

    if (isCancel(usage)) process.exit(1);

    const { stdout } = await execa('git', [
      '--no-pager',
      'grep',
      '--function-context',
      '--heading',
      '-E',
      usage.m.content.replace('(', '\\(').replace(')', '\\)'),
      usage.o.fileName
    ]);

    const mermaidSpinner = spinner();
    mermaidSpinner.start('Generating mermaid diagram...');
    const mermaid: any = await generateMermaid(stdout);
    mermaidSpinner.stop();
    if (mermaid) console.log(mermaid.mermaid);
    else note('No mermaid diagram found.');

    const isCommitConfirmedByUser = await confirm({
      message: 'Create Excalidraw file?'
    });

    if (isCommitConfirmedByUser) outro('created diagram.excalidraw');
    else outro('Excalidraw file not created.');
  }
);

@@ -35,7 +35,7 @@ export const prepareCommitMessageHook = async (

  if (!staged) return;

  intro('opencommit');
  intro('OpenCommit');

  const config = getConfig();

@@ -27,9 +27,9 @@ export class AzureEngine implements AiEngine {
    );
  }

  generateCommitMessage = async (
  async generateCommitMessage(
    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
  ): Promise<string | undefined> => {
  ): Promise<string | undefined> {
    try {
      const REQUEST_TOKENS = messages
        .map((msg) => tokenCount(msg.content as string) + 4)
@@ -73,5 +73,5 @@ export class AzureEngine implements AiEngine {

      throw err;
    }
  };
  }
}
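
The Azure engine hunk converts a class-property arrow function into a prototype method. A minimal sketch of the difference, with illustrative class and method names:

```ts
class ArrowStyle {
  // One function object per instance; `this` is captured lexically.
  generate = async (input: string): Promise<string> => `arrow: ${input}`;
}

class MethodStyle {
  // A single function on the prototype; `this` depends on the call site.
  async generate(input: string): Promise<string> {
    return `method: ${input}`;
  }
}

(async () => {
  console.log(await new ArrowStyle().generate('hi'));
  console.log(await new MethodStyle().generate('hi'));
})();
```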

@@ -258,7 +258,9 @@ const INIT_MAIN_PROMPT = (
  prompts: string[]
): OpenAI.Chat.Completions.ChatCompletionMessageParam => ({
  role: 'system',
  content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes and WHY the changes were done. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
  content: `${IDENTITY} Your mission is to create clean and comprehensive commit messages in the given @commitlint convention and explain WHAT were the changes ${
    config.OCO_WHY ? 'and WHY the changes were done' : ''
  }. I'll send you an output of 'git diff --staged' command, and you convert it into a commit message.
${
  config.OCO_EMOJI
    ? 'Use GitMoji convention to preface the commit.'
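
The hunk above makes the WHY clause of the system prompt conditional on `OCO_WHY`. A stripped-down rendering of just that sentence (the real prompt module includes much more surrounding text):

```ts
const renderMission = (why: boolean) =>
  `Your mission is to create clean and comprehensive commit messages ` +
  `and explain WHAT were the changes ${why ? 'and WHY the changes were done' : ''}.`;

console.log(renderMission(false)); // ...explain WHAT were the changes .
console.log(renderMission(true));  // ...explain WHAT were the changes and WHY the changes were done.
```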

@@ -1,13 +1,29 @@
import fs from 'fs/promises';
import path from 'path';

const findModulePath = (moduleName: string) => {
  const searchPaths = [
    path.join('node_modules', moduleName),
    path.join('node_modules', '.pnpm')
  ];

  for (const basePath of searchPaths) {
    try {
      const resolvedPath = require.resolve(moduleName, { paths: [basePath] });
      return resolvedPath;
    } catch {
      // Continue to the next search path if the module is not found
    }
  }

  throw new Error(`Cannot find module ${moduleName}`);
};
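
`findModulePath` wraps `require.resolve` with a couple of fallback search roots. A hedged usage sketch, assuming the helper is in scope; the resolved path will differ per machine and package manager:

```ts
// Resolves to something like <project>/node_modules/@commitlint/load/lib/load.js
const loadEntry = findModulePath('@commitlint/load');
console.log(loadEntry);

// Throws `Cannot find module ...` when neither search path can resolve the name.
try {
  findModulePath('@commitlint/definitely-not-installed');
} catch (err) {
  console.error((err as Error).message);
}
```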

const getCommitLintModuleType = async (): Promise<'cjs' | 'esm'> => {
  const packageFile = 'node_modules/@commitlint/load/package.json';
  const packageJsonPath = path.join(
    process.env.PWD || process.cwd(),
    packageFile,
  );
  const packageFile = '@commitlint/load/package.json';
  const packageJsonPath = findModulePath(packageFile);
  const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));

  if (!packageJson) {
    throw new Error(`Failed to parse ${packageFile}`);
  }
@@ -19,7 +35,7 @@ const getCommitLintModuleType = async (): Promise<'cjs' | 'esm'> => {
 * QualifiedConfig from any version of @commitlint/types
 * @see https://github.com/conventional-changelog/commitlint/blob/master/@commitlint/types/src/load.ts
 */
type QualifiedConfigOnAnyVersion = { [key:string]: unknown };
type QualifiedConfigOnAnyVersion = { [key: string]: unknown };

/**
 * This code is loading the configuration for the `@commitlint` package from the current working
@@ -27,36 +43,31 @@ type QualifiedConfigOnAnyVersion = { [key:string]: unknown };
 *
 * @returns
 */
export const getCommitLintPWDConfig = async (): Promise<QualifiedConfigOnAnyVersion | null> => {
  let load, nodeModulesPath;
  switch (await getCommitLintModuleType()) {
    case 'cjs':
      /**
       * CommonJS (<= commitlint@v18.x.x.)
       */
      nodeModulesPath = path.join(
        process.env.PWD || process.cwd(),
        'node_modules/@commitlint/load',
      );
      load = require(nodeModulesPath).default;
      break;
    case 'esm':
      /**
       * ES Module (commitlint@v19.x.x. <= )
       * Directory import is not supported in ES Module resolution, so import the file directly
       */
      nodeModulesPath = path.join(
        process.env.PWD || process.cwd(),
        'node_modules/@commitlint/load/lib/load.js',
      );
      load = (await import(nodeModulesPath)).default;
      break;
  }
export const getCommitLintPWDConfig =
  async (): Promise<QualifiedConfigOnAnyVersion | null> => {
    let load: Function, modulePath: string;
    switch (await getCommitLintModuleType()) {
      case 'cjs':
        /**
         * CommonJS (<= commitlint@v18.x.x.)
         */
        modulePath = findModulePath('@commitlint/load');
        load = require(modulePath).default;
        break;
      case 'esm':
        /**
         * ES Module (commitlint@v19.x.x. <= )
         * Directory import is not supported in ES Module resolution, so import the file directly
         */
        modulePath = await findModulePath('@commitlint/load/lib/load.js');
        load = (await import(modulePath)).default;
        break;
    }

  if (load && typeof load === 'function') {
    return await load();
  }
    if (load && typeof load === 'function') {
      return await load();
    }

  // @commitlint/load is not a function
  return null;
};
    // @commitlint/load is not a function
    return null;
  };

@@ -16,13 +16,18 @@ export const assertGitRepo = async () => {
//   (file) => `:(exclude)${file}`
// );

export const getIgnoredFolders = (): string[] => {
  try {
    return readFileSync('.opencommitignore').toString().split('\n');
  } catch (e) {
    return [];
  }
};

export const getOpenCommitIgnore = (): Ignore => {
  const ig = ignore();

  try {
    ig.add(readFileSync('.opencommitignore').toString().split('\n'));
  } catch (e) {}

  const ignorePatterns = getIgnoredFolders();
  ig.add(ignorePatterns);
  return ig;
};
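
`getIgnoredFolders` reads `.opencommitignore` line by line and `getOpenCommitIgnore` now reuses it. A small sketch of how the patterns end up in both places, with assumed file contents and assuming both helpers are in scope:

```ts
import { writeFileSync } from 'fs';

// Assumed example ignore file: one pattern per line.
writeFileSync('.opencommitignore', ['dist', 'coverage', '*.lock'].join('\n'));

const folders = getIgnoredFolders();        // ['dist', 'coverage', '*.lock']
const ig = getOpenCommitIgnore();
console.log(folders, ig.ignores('dist/index.js')); // true for ignored paths
```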


@@ -181,9 +181,7 @@ describe('cli flow to generate commit message using @commitlint prompt-module',
      [],
      { cwd: gitDir }
    );
    expect(
      await commitlintGet.findByText('[object Object]')
    ).toBeInTheConsole();
    expect(await commitlintGet.findByText('consistency')).toBeInTheConsole();

    // Run 'oco' using .opencommit-commitlint
    await render('echo', [`'console.log("Hello World");' > index.ts`], {

@@ -1,10 +1,16 @@
import { getConfig } from '../../src/commands/config';
import { existsSync, readFileSync, rmSync } from 'fs';
import {
  DEFAULT_CONFIG,
  getConfig,
  setConfig
} from '../../src/commands/config';
import { prepareFile } from './utils';
import { dirname } from 'path';

describe('getConfig', () => {
describe('config', () => {
  const originalEnv = { ...process.env };
  let globalConfigFile: { filePath: string; cleanup: () => Promise<void> };
  let localEnvFile: { filePath: string; cleanup: () => Promise<void> };
  let envConfigFile: { filePath: string; cleanup: () => Promise<void> };

  function resetEnv(env: NodeJS.ProcessEnv) {
    Object.keys(process.env).forEach((key) => {
@@ -19,7 +25,12 @@ describe('getConfig', () => {
  beforeEach(async () => {
    resetEnv(originalEnv);
    if (globalConfigFile) await globalConfigFile.cleanup();
    if (localEnvFile) await localEnvFile.cleanup();
    if (envConfigFile) await envConfigFile.cleanup();
  });

  afterEach(async () => {
    if (globalConfigFile) await globalConfigFile.cleanup();
    if (envConfigFile) await envConfigFile.cleanup();
  });

  afterAll(() => {
@@ -36,115 +47,249 @@ describe('getConfig', () => {
    return await prepareFile(fileName, fileContent);
  };

  it('should prioritize local .env over global .opencommit config', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_OPENAI_API_KEY: 'global-key',
      OCO_MODEL: 'gpt-3.5-turbo',
      OCO_LANGUAGE: 'en'
  describe('getConfig', () => {
    it('should prioritize local .env over global .opencommit config', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-3.5-turbo',
        OCO_LANGUAGE: 'en'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_OPENAI_API_KEY: 'local-key',
        OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
        OCO_LANGUAGE: 'fr'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
      expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
      expect(config.OCO_LANGUAGE).toEqual('fr');
      expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
    });

    localEnvFile = await generateConfig('.env', {
      OCO_OPENAI_API_KEY: 'local-key',
      OCO_ANTHROPIC_API_KEY: 'local-anthropic-key',
      OCO_LANGUAGE: 'fr'
    it('should fallback to global config when local config is not set', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'de',
        OCO_DESCRIPTION: 'true'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
      expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
      expect(config.OCO_LANGUAGE).toEqual('de');
      expect(config.OCO_DESCRIPTION).toEqual(true);
    });

    const config = getConfig({
      configPath: globalConfigFile.filePath,
      envPath: localEnvFile.filePath
    it('should handle boolean and numeric values correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_TOKENS_MAX_INPUT: '4096',
        OCO_TOKENS_MAX_OUTPUT: '500',
        OCO_GITPUSH: 'true'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_TOKENS_MAX_INPUT: '8192',
        OCO_ONE_LINE_COMMIT: 'false'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
      expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
      expect(config.OCO_GITPUSH).toEqual(true);
      expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_OPENAI_API_KEY).toEqual('local-key');
    expect(config.OCO_MODEL).toEqual('gpt-3.5-turbo');
    expect(config.OCO_LANGUAGE).toEqual('fr');
    expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
    it('should handle empty local config correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'es'
      });

      envConfigFile = await generateConfig('.env', {});

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
      expect(config.OCO_LANGUAGE).toEqual('es');
    });

    it('should override global config with null values in local .env', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'global-key',
        OCO_MODEL: 'gpt-4',
        OCO_LANGUAGE: 'es'
      });

      envConfigFile = await generateConfig('.env', {
        OCO_OPENAI_API_KEY: 'null'
      });

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual(null);
    });

    it('should handle empty global config', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      envConfigFile = await generateConfig('.env', {});

      const config = getConfig({
        globalPath: globalConfigFile.filePath,
        envPath: envConfigFile.filePath
      });

      expect(config).not.toEqual(null);
      expect(config.OCO_OPENAI_API_KEY).toEqual(undefined);
    });
  });

  it('should fallback to global config when local config is not set', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_OPENAI_API_KEY: 'global-key',
      OCO_MODEL: 'gpt-4',
      OCO_LANGUAGE: 'de',
      OCO_DESCRIPTION: 'true'
  describe('setConfig', () => {
    beforeEach(async () => {
      // we create and delete the file to have the parent directory, but not the file, to test the creation of the file
      globalConfigFile = await generateConfig('.opencommit', {});
      rmSync(globalConfigFile.filePath);
    });

    localEnvFile = await generateConfig('.env', {
      OCO_ANTHROPIC_API_KEY: 'local-anthropic-key'
    it('should create .opencommit file with DEFAULT CONFIG if it does not exist on first setConfig run', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key_1']],
        globalConfigFile.filePath
      );

      const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key_1');
      Object.entries(DEFAULT_CONFIG).forEach(([key, value]) => {
        expect(fileContent).toContain(`${key}=${value}`);
      });
    });

    const config = getConfig({
      configPath: globalConfigFile.filePath,
      envPath: localEnvFile.filePath
    it('should set new config values', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      await setConfig(
        [
          ['OCO_OPENAI_API_KEY', 'new-key'],
          ['OCO_MODEL', 'gpt-4']
        ],
        globalConfigFile.filePath
      );

      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_OPENAI_API_KEY).toEqual('new-key');
      expect(config.OCO_MODEL).toEqual('gpt-4');
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
    expect(config.OCO_ANTHROPIC_API_KEY).toEqual('local-anthropic-key');
    expect(config.OCO_MODEL).toEqual('gpt-4');
    expect(config.OCO_LANGUAGE).toEqual('de');
    expect(config.OCO_DESCRIPTION).toEqual(true);
  });
    it('should update existing config values', async () => {
      globalConfigFile = await generateConfig('.opencommit', {
        OCO_OPENAI_API_KEY: 'initial-key'
      });
      await setConfig(
        [['OCO_OPENAI_API_KEY', 'updated-key']],
        globalConfigFile.filePath
      );

  it('should handle boolean and numeric values correctly', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_TOKENS_MAX_INPUT: '4096',
      OCO_TOKENS_MAX_OUTPUT: '500',
      OCO_GITPUSH: 'true'
      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_OPENAI_API_KEY).toEqual('updated-key');
    });

    localEnvFile = await generateConfig('.env', {
      OCO_TOKENS_MAX_INPUT: '8192',
      OCO_ONE_LINE_COMMIT: 'false'
    it('should handle boolean and numeric values correctly', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});
      await setConfig(
        [
          ['OCO_TOKENS_MAX_INPUT', '8192'],
          ['OCO_DESCRIPTION', 'true'],
          ['OCO_ONE_LINE_COMMIT', 'false']
        ],
        globalConfigFile.filePath
      );

      const config = getConfig({ globalPath: globalConfigFile.filePath });
      expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
      expect(config.OCO_DESCRIPTION).toEqual(true);
      expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
    });

    const config = getConfig({
      configPath: globalConfigFile.filePath,
      envPath: localEnvFile.filePath
    it('should throw an error for unsupported config keys', async () => {
      globalConfigFile = await generateConfig('.opencommit', {});

      try {
        await setConfig(
          [['UNSUPPORTED_KEY', 'value']],
          globalConfigFile.filePath
        );
        throw new Error('NEVER_REACHED');
      } catch (error) {
        expect(error.message).toContain(
          'Unsupported config key: UNSUPPORTED_KEY'
        );
        expect(error.message).not.toContain('NEVER_REACHED');
      }
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_TOKENS_MAX_INPUT).toEqual(8192);
    expect(config.OCO_TOKENS_MAX_OUTPUT).toEqual(500);
    expect(config.OCO_GITPUSH).toEqual(true);
    expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
  });
    it('should persist changes to the config file', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

  it('should handle empty local config correctly', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_OPENAI_API_KEY: 'global-key',
      OCO_MODEL: 'gpt-4',
      OCO_LANGUAGE: 'es'
      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key']],
        globalConfigFile.filePath
      );

      const fileContent = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent).toContain('OCO_OPENAI_API_KEY=persisted-key');
    });

    localEnvFile = await generateConfig('.env', {});
    it('should set multiple configs in a row and keep the changes', async () => {
      const isGlobalConfigFileExist = existsSync(globalConfigFile.filePath);
      expect(isGlobalConfigFileExist).toBe(false);

    const config = getConfig({
      configPath: globalConfigFile.filePath,
      envPath: localEnvFile.filePath
      await setConfig(
        [['OCO_OPENAI_API_KEY', 'persisted-key']],
        globalConfigFile.filePath
      );

      const fileContent1 = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent1).toContain('OCO_OPENAI_API_KEY=persisted-key');

      await setConfig([['OCO_MODEL', 'gpt-4']], globalConfigFile.filePath);

      const fileContent2 = readFileSync(globalConfigFile.filePath, 'utf8');
      expect(fileContent2).toContain('OCO_MODEL=gpt-4');
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_OPENAI_API_KEY).toEqual('global-key');
    expect(config.OCO_MODEL).toEqual('gpt-4');
    expect(config.OCO_LANGUAGE).toEqual('es');
  });

  it('should override global config with null values in local .env', async () => {
    globalConfigFile = await generateConfig('.opencommit', {
      OCO_OPENAI_API_KEY: 'global-key',
      OCO_MODEL: 'gpt-4',
      OCO_LANGUAGE: 'es'
    });

    localEnvFile = await generateConfig('.env', { OCO_OPENAI_API_KEY: 'null' });

    const config = getConfig({
      configPath: globalConfigFile.filePath,
      envPath: localEnvFile.filePath
    });

    expect(config).not.toEqual(null);
    expect(config.OCO_OPENAI_API_KEY).toEqual(null);
  });
});

@@ -1,7 +1,7 @@
import path from 'path';
import { mkdtemp, rm, writeFile } from 'fs';
import { promisify } from 'util';
import { existsSync, mkdtemp, rm, writeFile } from 'fs';
import { tmpdir } from 'os';
import path from 'path';
import { promisify } from 'util';
const fsMakeTempDir = promisify(mkdtemp);
const fsRemove = promisify(rm);
const fsWriteFile = promisify(writeFile);
@@ -20,7 +20,9 @@ export async function prepareFile(
  const filePath = path.resolve(tempDir, fileName);
  await fsWriteFile(filePath, content);
  const cleanup = async () => {
    return fsRemove(tempDir, { recursive: true });
    if (existsSync(tempDir)) {
      await fsRemove(tempDir, { recursive: true });
    }
  };

  return {

@@ -4,7 +4,7 @@
    "lib": ["ES6", "ES2020"],

    "module": "CommonJS",

    "resolveJsonModule": true,
    "moduleResolution": "Node",

@@ -21,9 +21,7 @@

    "skipLibCheck": true
  },
  "include": [
    "test/jest-setup.ts"
  ],
  "include": ["test/jest-setup.ts"],
  "exclude": ["node_modules"],
  "ts-node": {
    "esm": true,