Compare commits

..

5 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| di-sukharev | 25468f67ad | 1.1.8 | 2023-03-14 18:50:57 +08:00 |
| Benny Neugebauer | 6766f62848 | Replace type assertion with built-in error detection (#10)<br>* refactor(api.ts): use built-in axios error detection | 2023-03-14 18:50:25 +08:00 |
| di-sukharev | 71c36db265 | 1.1.7 | 2023-03-13 16:44:36 +08:00 |
| di-sukharev | ed66e403e7 | 1.1.6 | 2023-03-13 16:44:19 +08:00 |
| di-sukharev | b89e50ebbf | * fix(generateCommitMessageFromGitDiff.ts): fix formatting of commit message prompt<br>* feat(generateCommitMessageFromGitDiff.ts): add description to commit message prompt | 2023-03-13 16:43:59 +08:00 |
4 changed files with 9 additions and 13 deletions

package-lock.json (generated)

@@ -1,12 +1,12 @@
 {
   "name": "opencommit",
-  "version": "1.1.5",
+  "version": "1.1.8",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "opencommit",
-      "version": "1.1.5",
+      "version": "1.1.8",
       "license": "ISC",
       "dependencies": {
         "@clack/prompts": "^0.6.1",

package.json

@@ -1,6 +1,6 @@
 {
   "name": "opencommit",
-  "version": "1.1.5",
+  "version": "1.1.8",
   "description": "GPT CLI to auto-generate impressive commits in 1 second. Killing lame commits with AI 🤯🔫",
   "keywords": [
     "git",

api.ts

@@ -1,5 +1,5 @@
 import { intro, outro } from '@clack/prompts';
-import { AxiosError } from 'axios';
+import axios from 'axios';
 import chalk from 'chalk';
 import {
   ChatCompletionRequestMessage,
@@ -50,15 +50,11 @@ class OpenAi {
       const message = data.choices[0].message;
       return message?.content;
-    } catch (error: any) {
+    } catch (error: unknown) {
       outro(`${chalk.red('✖')} ${error}`);
-      if (error.isAxiosError && error.response?.status === 401) {
-        const err = error as AxiosError;
-        const openAiError = (
-          err.response?.data as { error?: { message: string } }
-        ).error;
+      if (axios.isAxiosError<{ error?: { message: string } }>(error) && error.response?.status === 401) {
+        const openAiError = error.response.data.error;
         if (openAiError?.message) outro(openAiError.message);
         outro(
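
The refactor above leans on `axios.isAxiosError`, axios's built-in type guard: it narrows an `unknown` catch variable to `AxiosError`, and in axios versions whose typings accept a type parameter it also types `response.data`, which is what lets the manual `as AxiosError` and `as { error?: ... }` casts go away. A minimal standalone sketch of the same pattern follows; the endpoint, payload shape, and `fetchCompletion` helper are illustrative stand-ins, not code from this repository.

```ts
import axios from 'axios';

// Illustrative error-body shape; mirrors the { error?: { message: string } }
// type used in the diff above, not an official API type.
type ApiErrorBody = { error?: { message: string } };

// Hypothetical caller demonstrating the axios.isAxiosError type-guard pattern.
async function fetchCompletion(url: string): Promise<string | undefined> {
  try {
    const { data } = await axios.post<{ content: string }>(url, { prompt: 'hi' });
    return data.content;
  } catch (error: unknown) {
    // Built-in type guard: inside this branch `error` is AxiosError<ApiErrorBody>,
    // so response.data is typed without a manual `as` assertion.
    if (axios.isAxiosError<ApiErrorBody>(error) && error.response?.status === 401) {
      console.error(error.response?.data.error?.message ?? 'Unauthorized');
      return undefined;
    }
    throw error;
  }
}
```

The 401 branch mirrors the invalid or missing API-key case the repo reports via `outro`; in this sketch any other error is simply rethrown.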

generateCommitMessageFromGitDiff.ts

@@ -49,8 +49,8 @@ const INIT_MESSAGES_PROMPT: Array<ChatCompletionRequestMessage> = [
   {
     role: ChatCompletionRequestMessageRoleEnum.Assistant,
     // prettier-ignore
-    content: `* ${config?.emoji ? '🐛 ' : ''}fix(server.ts): change port variable case from lowercase port to uppercase PORT
-* ${config?.emoji ? '✨ ' : ''}feat(server.ts): add support for process.env.PORT environment variable
+    content: `${config?.emoji ? '🐛 ' : ''}fix(server.ts): change port variable case from lowercase port to uppercase PORT
+${config?.emoji ? '✨ ' : ''}feat(server.ts): add support for process.env.PORT environment variable
 ${config?.description ? 'The port variable is now named PORT, which improves consistency with the naming conventions as PORT is a constant. Support for an environment variable allows the application to be more flexible as it can now run on any available port specified via the process.env.PORT environment variable.' : ''}`
   }
 ];
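
For context on what this last hunk changes: the assistant example in `INIT_MESSAGES_PROMPT` no longer prefixes each line with `* `, while the `config?.emoji` and `config?.description` flags still decide whether the emoji prefix and the explanatory paragraph appear. A rough sketch of how those flags shape the rendered example follows; the `OcoConfig` type and `renderExample` helper are stand-ins for illustration, not the repo's actual config code.

```ts
// Stand-in for the relevant part of the user's config; the real config loader is not shown here.
type OcoConfig = { emoji?: boolean; description?: boolean };

// Mirrors the updated template: plain conventional-commit lines, no leading "* " bullets.
// The description text is shortened here for brevity.
const renderExample = (config?: OcoConfig): string =>
  `${config?.emoji ? '🐛 ' : ''}fix(server.ts): change port variable case from lowercase port to uppercase PORT
${config?.emoji ? '✨ ' : ''}feat(server.ts): add support for process.env.PORT environment variable
${config?.description ? 'The port variable is now named PORT, which improves consistency with the naming conventions.' : ''}`;

// With emoji enabled and description disabled:
// renderExample({ emoji: true })
//   → "🐛 fix(server.ts): ...\n✨ feat(server.ts): ...\n" (the description line stays empty)
```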