Compare commits

..

27 Commits

Author SHA1 Message Date
di-sukharev
53ae8926fa 3.0.6 2024-02-27 22:17:06 +08:00
di-sukharev
6c743ba230 chore(README.md): update link to GitHub 2023 hackathon in README
The link to the GitHub 2023 hackathon in the README has been updated to point to the correct URL. This ensures that users can easily access the relevant information about the hackathon.
2024-02-27 22:16:23 +08:00
di-sukharev
9852c36a98 build 2024-02-27 22:07:18 +08:00
di-sukharev
5f85cafc7e 3.0.5 2024-02-27 22:07:16 +08:00
di-sukharev
0591e6e81e docs(README.md): add alternative ways to run OpenCommit via npx and bunx
The README.md file has been updated to include alternative ways to run OpenCommit via the `npx opencommit` and `bunx opencommit` commands. This provides users with more options to execute the tool without the need for a global installation.
2024-02-27 22:06:08 +08:00
di-sukharev
a296892aaf build 2024-02-27 22:04:39 +08:00
di-sukharev
45958284c9 Merge branch 'master' into dev 2024-02-27 22:04:13 +08:00
Jaroslaw Weber
1d6980faf3 add ollama support (#269)
* add ollama support

---------

Co-authored-by: di-sukharev <dim.sukharev@gmail.com>
Co-authored-by: GPT10 <57486732+di-sukharev@users.noreply.github.com>
Co-authored-by: Jaroslaw Weber <jaroslaw.weber@adriel.com>
2024-02-27 22:01:13 +08:00
di-sukharev
f793f01059 Merge branch 'master' of github.com:di-sukharev/opencommit 2023-11-27 12:14:13 +08:00
di-sukharev
7deffa8ee2 update twitter 2023-11-27 12:00:28 +08:00
GPT10
84dfc85328 Update README.md
fix(README.md): GitHub hackathon link
2023-10-05 22:05:45 +08:00
di-sukharev
b79aef5fad build 2023-09-18 14:23:43 +08:00
di-sukharev
57d9cc59b5 fix(config.ts): add default value for config parameter in OCO_OPENAI_API_KEY validator function
The OCO_OPENAI_API_KEY validator function now accepts an optional config parameter with a default value of an empty object {}. This change ensures that the function can be called without providing the config parameter, improving flexibility and avoiding potential errors when the parameter is not provided.
2023-09-18 14:21:55 +08:00
di-sukharev
e599700d72 build 2023-09-09 15:52:29 +08:00
di-sukharev
2761403735 Merge remote-tracking branch 'origin/master' into dev 2023-09-09 15:52:13 +08:00
di-sukharev
e57033c4a1 chore(README.md): update information about default model and cost
The README now states that OpenCommit uses the official ChatGPT (gpt-3.5-turbo-16k) model by default and that the cost should not exceed $0.10 per casual working day. It also notes that users can switch to gpt-4, which is better but more expensive. This gives users more information about the available models and their associated costs.
2023-09-05 20:30:39 +08:00
di-sukharev
ca049e4b5d chore(package.json): update deploy script to push tags before publishing to npm
The deploy script in package.json has been updated to push tags before publishing to npm. This ensures that the latest version tag is pushed to the remote repository before publishing the package.
2023-09-05 13:57:20 +08:00
di-sukharev
2d48648f52 chore(package.json): update version to 3.0.3
The version in package.json has been updated from 3.0.2 to 3.0.3. This change is made to reflect the latest version of the project.
2023-09-05 13:51:21 +08:00
di-sukharev
40297e0c6a chore(package.json): update version to 3.0.2
The version in package.json has been updated from 3.0.1 to 3.0.2. This change is made to reflect the latest version of the project.
2023-09-05 13:49:17 +08:00
di-sukharev
75f0cd47b8 chore(package.json): update version to 3.0.1
The version in package.json has been updated from 3.0.0 to 3.0.1. This change is made to reflect the new version of the project.
2023-09-05 13:48:09 +08:00
Sukharev
c76313737d #247: fix (#248) 2023-09-05 13:45:34 +08:00
di-sukharev
a2b1890e7e build 2023-09-05 13:42:30 +08:00
di-sukharev
df705b97b7 Merge remote-tracking branch 'origin/master' into dev 2023-09-05 13:42:22 +08:00
di-sukharev
df280b7db7 fix(commitlint): use process.cwd() as fallback for process.env.PWD to ensure compatibility across platforms
The commitlint module now uses process.cwd() as a fallback when process.env.PWD is not available. This ensures compatibility across different platforms and environments when determining the path to the node_modules directory.
2023-09-05 13:39:49 +08:00
di-sukharev
ac8c87be9e docs(README.md): add note about MacOS potentially requiring 'sudo' when installing package globally
The README.md file now includes a note that MacOS may prompt the user to run the 'npm install -g opencommit' command with 'sudo' when installing the package globally. This note is added to provide clarity and help users who encounter this situation.
2023-09-03 15:22:47 +08:00
di-sukharev
a9050fda39 build 2023-09-03 15:17:12 +08:00
Sukharev
b98b892ba1 v3.0.0 (#243)
* update major version

---------

Co-authored-by: E Einowski <5124085+EwiththeBowtie@users.noreply.github.com>
Co-authored-by: Max Zavodniuk <lovemaxonly@gmail.com>
Co-authored-by: Malthe Poulsen <30603252+malpou@users.noreply.github.com>
Co-authored-by: Sébastien Fichot <fichot.sebastien@gmail.com>
2023-09-03 15:16:02 +08:00
20 changed files with 2968 additions and 7109 deletions

View File

@@ -2,13 +2,12 @@
<div>
<img src=".github/logo-grad.svg" alt="OpenCommit logo"/>
<h1 align="center">OpenCommit</h1>
<h4 align="center">Follow the bird <a href="https://twitter.com/io_Y_oi"><img src="https://img.shields.io/twitter/follow/io_Y_oi?style=flat&label=io_Y_oi&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
<h4 align="center">Follow the bird <a href="https://twitter.com/_sukharev_"><img src="https://img.shields.io/twitter/follow/_sukharev_?style=flat&label=_sukharev_&logo=twitter&color=0bf&logoColor=fff" align="center"></a>
</div>
<h2>Auto-generate meaningful commits in 1 second</h2>
<p>Killing lame commits with AI 🤯🔫</p>
<a href="https://www.npmjs.com/package/opencommit"><img src="https://img.shields.io/npm/v/opencommit" alt="Current version"></a>
<h4 align="center">🪩 Winner of GitHub 2023 HACKATHON <a href="https://twitter.com/io_Y_oi/status/1683448136973582336"><img style="width:14px; height:14px; margin-top: -4px" src=".github/github-mark-white.png" align="center"></a>
</h4>
<h4 align="center">🪩 Winner of <a href="https://twitter.com/_sukharev_/status/1683448136973582336">GitHub 2023 hackathon</a> 🪩</h4>
</div>
---
@@ -29,6 +28,10 @@ You can use OpenCommit by simply running it via the CLI like this `oco`. 2 secon
npm install -g opencommit
```
Alternatively run it via `npx opencommit` or `bunx opencommit`
MacOS may ask to run the command with `sudo` when installing a package globally.
2. Get your API key from [OpenAI](https://platform.openai.com/account/api-keys). Make sure that you add your payment details, so the API works.
3. Set the key to OpenCommit config:
@@ -55,6 +58,17 @@ git add <files...>
oco
```
You can also run it with local model through ollama:
- install and start ollama
- run `ollama run mistral` (do this only once, to pull model)
- run (in your project directory):
```sh
git add <files...>
AI_PROVIDER='ollama' opencommit
```
## Configuration
### Local per repo configuration
@@ -330,4 +344,6 @@ You pay for your requests to OpenAI API on your own.
OpenCommit stores your key locally.
OpenCommit by default uses ChatGPT (3.5-turbo-16k) official model, which is a lot cheaper than gpt-4.
OpenCommit by default uses 3.5-turbo-16k model, it should not exceed $0.10 per casual working day.
You may switch to gpt-4, it's better, but more expensive.

View File

@@ -1,12 +0,0 @@
// @ts-ignore
// import { jest } from '@jest/globals';
import { generateCommitMessageByDiff } from '../src/generateCommitMessageFromGitDiff';
test.skip('generateCommitMessageFromGitDiff', async () => {
const GIT_DIFF = ``;
const res = await generateCommitMessageByDiff(GIT_DIFF);
expect(res).toBe('lol');
});

View File

@@ -1,79 +0,0 @@
import { getCommitMsgsPromisesFromFileDiffs } from '../src/generateCommitMessageFromGitDiff';
const oneFileThreeChanges = `diff --git a/example.txt b/example.txt
index e69de29..3f6a3fa 100644
--- a/example.txt
+++ b/example.txt
@@ -1,2 +1,2 @@
-Hello, World!
+Hello, everyone!
This is an example file.
@@ -4,2 +4,2 @@
-Goodbye, World!
+Goodbye, everyone!
Have a great day!
@@ -7,2 +7,2 @@
-It's a sunny day!
+It's a rainy day!
Let's go for a walk.`;
const fourFilesOneChangeEach = `diff --git a/file1.txt b/file1.txt
index e69de29..3f6a3fa 100644
--- a/file1.txt
+++ b/file1.txt
@@ -1,2 +1,2 @@
-Hello, World!
+Hello, everyone!
This is file 1.
diff --git a/file2.txt b/file2.txt
index 87c0ddc..d7b182e 100644
--- a/file2.txt
+++ b/file2.txt
@@ -1,3 +1,3 @@
This is file 2.
-Goodbye, World!
+Goodbye, everyone!
Have a great day!
diff --git a/file3.txt b/file3.txt
index e69de29..3f6a3fa 100644
--- a/file3.txt
+++ b/file3.txt
@@ -1,4 +1,4 @@
This is file 3.
-It's a sunny day!
+It's a rainy day!
Let's go for a walk.
diff --git a/file4.txt b/file4.txt
index 3f6a3fa..87c0ddc 100644
--- a/file4.txt
+++ b/file4.txt
@@ -1,5 +1,5 @@
This is file 4.
-It's time to sleep.
+It's time to wake up.
Goodnight.
`;
test('1', async () => {
const MAX_LENGTH = 50;
const oneFile3Changes = await getCommitMsgsPromisesFromFileDiffs(
oneFileThreeChanges,
MAX_LENGTH
);
expect(oneFile3Changes).toBe('lol');
});
test('2', async () => {
const MAX_LENGTH = 50;
const fourFilesOneChange = await getCommitMsgsPromisesFromFileDiffs(
fourFilesOneChangeEach,
MAX_LENGTH
);
expect(fourFilesOneChange).toBe('lol');
});

View File

@@ -1,21 +0,0 @@
import type { JestConfigWithTsJest } from 'ts-jest';
const jestConfig: JestConfigWithTsJest = {
// [...]
extensionsToTreatAsEsm: ['.ts'],
moduleNameMapper: {
'^(\\.{1,2}/.*)\\.js$': '$1'
},
transform: {
// '^.+\\.[tj]sx?$' to process js/ts with `ts-jest`
// '^.+\\.m?[tj]sx?$' to process js/ts/mjs/mts with `ts-jest`
'^.+\\.tsx?$': [
'ts-jest',
{
useESM: true
}
]
}
};
export default jestConfig;

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

package-lock.json (generated, 4286 lines)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "opencommit",
"version": "3.0.0",
"version": "3.0.6",
"description": "Auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
"keywords": [
"git",
@@ -12,7 +12,8 @@
"aicommit",
"aicommits",
"gptcommit",
"commit"
"commit",
"ollama"
],
"main": "cli.js",
"bin": {
@@ -40,28 +41,25 @@
"scripts": {
"watch": "npm run -S build -- --sourcemap --watch",
"start": "node ./out/cli.cjs",
"ollama:start": "OCO_AI_PROVIDER='ollama' node ./out/cli.cjs",
"dev": "ts-node ./src/cli.ts",
"build": "rimraf out && node esbuild.config.js",
"build:push": "npm run build && git add . && git commit -m 'build' && git push",
"deploy": "npm run build:push && npm version patch && git push --tags && npm publish --tag latest",
"deploy": "npm version patch && npm run build:push && git push --tags && npm publish --tag latest",
"lint": "eslint src --ext ts && tsc --noEmit",
"format": "prettier --write src",
"test": "NODE_OPTIONS=--experimental-vm-modules jest --verbose --coverage --config jest.config.ts"
"format": "prettier --write src"
},
"devDependencies": {
"@commitlint/types": "^17.4.4",
"@types/ini": "^1.3.31",
"@types/inquirer": "^9.0.3",
"@types/jest": "^29.5.4",
"@types/node": "^16.18.14",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"dotenv": "^16.0.3",
"esbuild": "^0.15.18",
"eslint": "^8.28.0",
"jest": "^29.6.4",
"prettier": "^2.8.4",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"typescript": "^4.9.3"
},
@@ -73,6 +71,7 @@
"@dqbd/tiktoken": "^1.0.2",
"@octokit/webhooks-schemas": "^6.11.0",
"@octokit/webhooks-types": "^6.11.0",
"ai": "^2.2.14",
"axios": "^1.3.4",
"chalk": "^5.2.0",
"cleye": "^1.3.2",

View File

@@ -199,17 +199,17 @@ export async function commit(
}
if (stagedFiles.length === 0 && changedFiles.length > 0) {
const files = await multiselect({
const files = (await multiselect({
message: chalk.cyan('Select the files you want to add to the commit:'),
options: changedFiles.map((file) => ({
value: file,
label: file
}))
});
})) as string[];
if (isCancel(files)) process.exit(1);
await gitAdd({ files: files as string[] });
await gitAdd({ files });
}
await commit(extraArgs, false);

View File

@@ -22,7 +22,8 @@ export enum CONFIG_KEYS {
OCO_MODEL = 'OCO_MODEL',
OCO_LANGUAGE = 'OCO_LANGUAGE',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER = 'OCO_MESSAGE_TEMPLATE_PLACEHOLDER',
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE'
OCO_PROMPT_MODULE = 'OCO_PROMPT_MODULE',
OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
}
export const DEFAULT_MODEL_TOKEN_LIMIT = 4096;
@@ -47,8 +48,9 @@ const validateConfig = (
};
export const configValidators = {
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config?: any) {
validateConfig(CONFIG_KEYS.OCO_OPENAI_API_KEY, value, 'Cannot be empty');
[CONFIG_KEYS.OCO_OPENAI_API_KEY](value: any, config: any = {}) {
//need api key unless running locally with ollama
validateConfig('API_KEY', value || config.OCO_AI_PROVIDER == 'ollama', 'You need to provide an API key');
validateConfig(
CONFIG_KEYS.OCO_OPENAI_API_KEY,
value.startsWith('sk-'),
@@ -150,7 +152,20 @@ export const configValidators = {
);
return value;
}
},
[CONFIG_KEYS.OCO_AI_PROVIDER](value: any) {
validateConfig(
CONFIG_KEYS.OCO_AI_PROVIDER,
[
'',
'openai',
'ollama'
].includes(value),
`${value} is not supported yet, use 'ollama' or 'openai' (default)`
);
return value;
},
};
export type ConfigType = {
@@ -172,7 +187,8 @@ export const getConfig = (): ConfigType | null => {
OCO_LANGUAGE: process.env.OCO_LANGUAGE || 'en',
OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER || '$msg',
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit'
OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE || 'conventional-commit',
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER || 'openai'
};
const configExists = existsSync(configPath);
@@ -198,9 +214,7 @@ export const getConfig = (): ConfigType | null => {
config[configKey] = validValue;
} catch (error) {
outro(
`'${configKey}' name is invalid, it should be either 'OCO_${configKey.toUpperCase()}' or it doesn't exist.`
);
outro(`Unknown '${configKey}' config option.`);
outro(
`Manually fix the '.env' file or global '~/.opencommit' config file.`
);
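
For illustration, the two validator changes above (the defaulted `config` parameter and the new provider allowlist) can be sketched roughly as follows; the helper names below are assumptions for the sketch, not the repository's exports:

```ts
// Rough sketch of the validator behaviour shown in the diff above.
// `validate` stands in for the repo's validateConfig helper (assumed shape).
const validate = (key: string, condition: unknown, message: string) => {
  if (!condition) throw new Error(`${key}: ${message}`);
};

const SUPPORTED_PROVIDERS = ['', 'openai', 'ollama'];

// Defaulting `config` to {} lets callers omit the argument without
// risking a property access on `undefined`.
function validateApiKey(value: string | undefined, config: any = {}) {
  const isOllama = config.OCO_AI_PROVIDER === 'ollama';
  validate('API_KEY', value || isOllama, 'You need to provide an API key');
  return value;
}

function validateProvider(value: string) {
  validate(
    'OCO_AI_PROVIDER',
    SUPPORTED_PROVIDERS.includes(value),
    `${value} is not supported yet, use 'ollama' or 'openai' (default)`
  );
  return value;
}

validateApiKey(undefined, { OCO_AI_PROVIDER: 'ollama' }); // allowed for local models
validateProvider('ollama');                               // passes
// validateProvider('anthropic');                         // would throw
```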

src/engine/Engine.ts (new file, +7 lines)
View File

@@ -0,0 +1,7 @@
import { ChatCompletionRequestMessage } from 'openai';
export interface AiEngine {
generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined>;
}
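
The interface above is the seam the rest of this change plugs into. As a hypothetical illustration (not part of the commit, and assumed to sit next to Engine.ts), any provider only needs to implement it:

```ts
import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';

// Hypothetical stub engine, useful e.g. in tests; not part of this change.
class FakeEngine implements AiEngine {
  async generateCommitMessage(
    messages: Array<ChatCompletionRequestMessage>
  ): Promise<string | undefined> {
    // Return a fixed message instead of calling a real model.
    return `chore: stub commit message (${messages.length} prompt message(s))`;
  }
}

export const fakeEngine = new FakeEngine();
```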

src/engine/ollama.ts (new file, +36 lines)
View File

@@ -0,0 +1,36 @@
import axios, { AxiosError } from 'axios';
import { ChatCompletionRequestMessage } from 'openai';
import { AiEngine } from './Engine';
export class OllamaAi implements AiEngine {
async generateCommitMessage(
messages: Array<ChatCompletionRequestMessage>
): Promise<string | undefined> {
const model = 'mistral'; // todo: allow other models
let prompt = messages.map((x) => x.content).join('\n');
// hotfix: local models are not so clever, so I'm changing the prompt a bit...
prompt += 'Summarize above git diff in 10 words or less';
const url = 'http://localhost:11434/api/generate';
const p = {
model,
prompt,
stream: false
};
try {
const response = await axios.post(url, p, {
headers: {
'Content-Type': 'application/json'
}
});
const answer = response.data?.response;
return answer;
} catch (err: any) {
const message = err.response?.data?.error ?? err.message;
throw new Error('local model issues. details: ' + message);
}
}
}
export const ollamaAi = new OllamaAi();
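
A minimal usage sketch for the engine above, assuming Ollama is running locally on its default port (11434) and the `mistral` model has already been pulled, as the README section earlier in this diff describes; the import path is assumed relative to the repository root:

```ts
import { ollamaAi } from './src/engine/ollama';

// The message shape follows the ChatCompletionRequestMessage type used above.
const messages = [
  {
    role: 'user' as const,
    content: 'diff --git a/example.txt b/example.txt\n-Hello, World!\n+Hello, everyone!'
  }
];

ollamaAi
  .generateCommitMessage(messages)
  .then((msg) => console.log(msg))
  // Errors surface as "local model issues. details: ..." per the catch block above.
  .catch((err) => console.error(err.message));
```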

View File

@@ -13,23 +13,27 @@ import {
CONFIG_MODES,
DEFAULT_MODEL_TOKEN_LIMIT,
getConfig
} from './commands/config';
import { GenerateCommitMessageErrorEnum } from './generateCommitMessageFromGitDiff';
import { tokenCount } from './utils/tokenCount';
} from '../commands/config';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine } from './Engine';
const config = getConfig();
let maxTokens = config?.OCO_OPENAI_MAX_TOKENS;
let basePath = config?.OCO_OPENAI_BASE_PATH;
let apiKey = config?.OCO_OPENAI_API_KEY;
let apiKey = config?.OCO_OPENAI_API_KEY
const [command, mode] = process.argv.slice(2);
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
const isLocalModel = config?.OCO_AI_PROVIDER == 'ollama'
if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set && !isLocalModel) {
intro('opencommit');
outro(
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token>. Make sure you add payment details, so API works.`'
'OCO_OPENAI_API_KEY is not set, please run `oco config set OCO_OPENAI_API_KEY=<your token> . If you are using GPT, make sure you add payment details, so API works.`'
);
outro(
'For help look into README https://github.com/di-sukharev/opencommit#setup'
@@ -40,7 +44,7 @@ if (!apiKey && command !== 'config' && mode !== CONFIG_MODES.set) {
const MODEL = config?.OCO_MODEL || 'gpt-3.5-turbo';
class OpenAi {
class OpenAi implements AiEngine {
private openAiApiConfiguration = new OpenAiApiConfiguration({
apiKey: apiKey
});
@@ -100,16 +104,6 @@ class OpenAi {
};
}
export const getOpenCommitLatestVersion = async (): Promise<
string | undefined
> => {
try {
const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
return stdout;
} catch (_) {
outro('Error while getting the latest version of opencommit');
return undefined;
}
};
export const api = new OpenAi();

View File

@@ -3,11 +3,11 @@ import {
ChatCompletionRequestMessageRoleEnum
} from 'openai';
import { api } from './api';
import { DEFAULT_MODEL_TOKEN_LIMIT, getConfig } from './commands/config';
import { getMainCommitPrompt } from './prompts';
import { mergeDiffs } from './utils/mergeDiffs';
import { tokenCount } from './utils/tokenCount';
import { getEngine } from './utils/engine';
const config = getConfig();
@@ -67,7 +67,8 @@ export const generateCommitMessageByDiff = async (
const messages = await generateCommitMessageChatCompletionPrompt(diff);
const commitMessage = await api.generateCommitMessage(messages);
const engine = getEngine()
const commitMessage = await engine.generateCommitMessage(messages);
if (!commitMessage)
throw new Error(GenerateCommitMessageErrorEnum.emptyMessage);
@@ -104,13 +105,14 @@ function getMessagesPromisesByChangesInFile(
}
}
const engine = getEngine()
const commitMsgsFromFileLineDiffs = lineDiffsWithHeader.map(
async (lineDiff) => {
const messages = await generateCommitMessageChatCompletionPrompt(
separator + lineDiff
);
return api.generateCommitMessage(messages);
return engine.generateCommitMessage(messages);
}
);
@@ -177,7 +179,8 @@ export const getCommitMsgsPromisesFromFileDiffs = async (
separator + fileDiff
);
commitMessagePromises.push(api.generateCommitMessage(messages));
const engine = getEngine()
commitMessagePromises.push(engine.generateCommitMessage(messages));
}
}

View File

@@ -1,6 +1,5 @@
import { spinner } from '@clack/prompts';
import { api } from '../../api';
import { getConfig } from '../../commands/config';
import { i18n, I18nLocals } from '../../i18n';
import { COMMITLINT_LLM_CONFIG_PATH } from './constants';
@@ -9,6 +8,7 @@ import { commitlintPrompts, inferPromptsFromCommitlintConfig } from './prompts';
import { getCommitLintPWDConfig } from './pwd-commitlint';
import { CommitlintLLMConfig } from './types';
import * as utils from './utils';
import { getEngine } from '../../utils/engine';
const config = getConfig();
const translation = i18n[(config?.OCO_LANGUAGE as I18nLocals) || 'en'];
@@ -55,8 +55,9 @@ export const configureCommitlintIntegration = async (force = false) => {
// consistencyPrompts.map((p) => p.content)
// );
const engine = getEngine()
let consistency =
(await api.generateCommitMessage(consistencyPrompts)) || '{}';
(await engine.generateCommitMessage(consistencyPrompts)) || '{}';
// Cleanup the consistency answer. Sometimes 'gpt-3.5-turbo' sends rule's back.
prompts.forEach((prompt) => (consistency = consistency.replace(prompt, '')));

View File

@@ -1,7 +1,7 @@
import path from 'path';
const nodeModulesPath = path.join(
process.env.PWD as string,
process.env.PWD || process.cwd(),
'node_modules',
'@commitlint',
'load'
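
A small illustration of why the fallback above matters (hypothetical, not from the repo): `process.env.PWD` is set by POSIX shells but is typically undefined on Windows or when Node is spawned without a shell, while `process.cwd()` is always available:

```ts
import path from 'path';

// In a POSIX shell: PWD === cwd, so behaviour is unchanged.
// On Windows or non-shell spawns: PWD is undefined, so cwd() is used instead.
const baseDir = process.env.PWD || process.cwd();

const commitlintLoadPath = path.join(baseDir, 'node_modules', '@commitlint', 'load');
console.log(commitlintLoadPath);
```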

View File

@@ -3,7 +3,7 @@ import chalk from 'chalk';
import { outro } from '@clack/prompts';
import currentPackage from '../../package.json';
import { getOpenCommitLatestVersion } from '../api';
import { getOpenCommitLatestVersion } from '../version';
export const checkIsLatestVersion = async () => {
const latestVersion = await getOpenCommitLatestVersion();

src/utils/engine.ts (new file, +13 lines)
View File

@@ -0,0 +1,13 @@
import { AiEngine } from '../engine/Engine';
import { api } from '../engine/openAi';
import { getConfig } from '../commands/config';
import { ollamaAi } from '../engine/ollama';
export function getEngine(): AiEngine {
const config = getConfig();
if (config?.OCO_AI_PROVIDER == 'ollama') {
return ollamaAi;
}
//open ai gpt by default
return api;
}
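
To illustrate the provider switch above (a sketch, assuming `OCO_AI_PROVIDER` is picked up from the environment by `getConfig()`, as the config.ts diff earlier suggests; the import path is assumed relative to the repository root):

```ts
import { getEngine } from './src/utils/engine';

// With OCO_AI_PROVIDER unset or set to 'openai', getEngine() returns the OpenAI
// engine; with OCO_AI_PROVIDER='ollama' it returns the local Ollama engine.
process.env.OCO_AI_PROVIDER = 'ollama';

(async () => {
  const engine = getEngine();
  const message = await engine.generateCommitMessage([
    {
      role: 'user' as const,
      content: 'diff --git a/file1.txt b/file1.txt\n-Hello, World!\n+Hello, everyone!'
    }
  ]);
  console.log(message); // generateCommitMessageByDiff and commitlint use the same call
})();
```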

src/version.ts (new file, +14 lines)
View File

@@ -0,0 +1,14 @@
import { outro } from "@clack/prompts";
import { execa } from "execa";
export const getOpenCommitLatestVersion = async (): Promise<
string | undefined
> => {
try {
const { stdout } = await execa('npm', ['view', 'opencommit', 'version']);
return stdout;
} catch (_) {
outro('Error while getting the latest version of opencommit');
return undefined;
}
};

View File

@@ -1,9 +1,9 @@
{
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"lib": ["ES5", "ES6"],
"module": "ESNext",
// "rootDir": "./src",
"resolveJsonModule": true,
"moduleResolution": "node",