diff --git a/out/cli.cjs b/out/cli.cjs index 5f465ac..e710c84 100755 --- a/out/cli.cjs +++ b/out/cli.cjs @@ -21709,17 +21709,19 @@ var OpenAi = class { this.openAI = new import_openai.OpenAIApi(this.openAiApiConfiguration); } generateCommitMessage = async (messages) => { + const params = { + model: MODEL, + messages, + temperature: 0, + top_p: 0.1, + max_tokens: maxTokens ?? 500 + }; try { - const { data } = await this.openAI.createChatCompletion({ - model: MODEL, - messages, - temperature: 0, - top_p: 0.1, - max_tokens: maxTokens ?? 500 - }); + const { data } = await this.openAI.createChatCompletion(params); const message = data.choices[0].message; return message?.content; } catch (error) { + ce(`${source_default.red("\u2716")} ${JSON.stringify(params)}`); const err = error; ce(`${source_default.red("\u2716")} ${err?.message || err}`); if (axios_default.isAxiosError(error) && error.response?.status === 401) { diff --git a/out/github-action.cjs b/out/github-action.cjs index 26359a7..2f37b5c 100644 --- a/out/github-action.cjs +++ b/out/github-action.cjs @@ -27712,17 +27712,19 @@ var OpenAi = class { this.openAI = new import_openai.OpenAIApi(this.openAiApiConfiguration); } generateCommitMessage = async (messages) => { + const params = { + model: MODEL, + messages, + temperature: 0, + top_p: 0.1, + max_tokens: maxTokens ?? 500 + }; try { - const { data } = await this.openAI.createChatCompletion({ - model: MODEL, - messages, - temperature: 0, - top_p: 0.1, - max_tokens: maxTokens ?? 500
- }); + const { data } = await this.openAI.createChatCompletion(params); const message = data.choices[0].message; return message?.content; } catch (error) { + ce(`${source_default.red("\u2716")} ${JSON.stringify(params)}`); const err = error; ce(`${source_default.red("\u2716")} ${err?.message || err}`); if (axios_default.isAxiosError(error) && error.response?.status === 401) { diff --git a/src/api.ts b/src/api.ts index 6f97602..69b5265 100644 --- a/src/api.ts +++ b/src/api.ts @@ -48,19 +48,22 @@ class OpenAi { public generateCommitMessage = async ( messages: Array ): Promise => { + const params = { + model: MODEL, + messages, + temperature: 0, + top_p: 0.1, + max_tokens: maxTokens ?? 500 + }; try { - const { data } = await this.openAI.createChatCompletion({ - model: MODEL, - messages, - temperature: 0, - top_p: 0.1, - max_tokens: maxTokens ?? 500 - }); + const { data } = await this.openAI.createChatCompletion(params); const message = data.choices[0].message; return message?.content; } catch (error) { + outro(`${chalk.red('✖')} ${JSON.stringify(params)}`); + const err = error as Error; outro(`${chalk.red('✖')} ${err?.message || err}`);