From 6c48c935e252ed7091f57c5fe5ae4ca9b2d3d51d Mon Sep 17 00:00:00 2001
From: EmilienMottet
Date: Tue, 29 Apr 2025 20:51:24 +0200
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20add=20custom=20HTTP=20headers=20sup?=
 =?UTF-8?q?port=20via=20OCO=5FAPI=5FCUSTOM=5FHEADERS?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Add OCO_API_CUSTOM_HEADERS variable to README, config enum, and env
parsing to allow JSON string of custom headers. Validate that custom
headers are valid JSON in config validator.

Extend AiEngineConfig with customHeaders and pass headers to
OllamaEngine and OpenAiEngine clients when creating requests. Parse
custom headers in utils/engine and warn on invalid format.

Add unit tests to ensure OCO_API_CUSTOM_HEADERS is handled correctly
and merged from env over global config.

This enables users to send additional headers such as Authorization or
tracing headers with LLM API calls.
---
 README.md                |  1 +
 src/commands/config.ts   | 19 +++++++++++++++++++
 src/engine/Engine.ts     |  1 +
 src/engine/ollama.ts     |  9 ++++++++-
 src/engine/openAi.ts     | 33 ++++++++++++++++++++++++++++-----
 src/utils/engine.ts      | 19 ++++++++++++++++++-
 test/unit/config.test.ts | 24 ++++++++++++++++++++++++
 7 files changed, 99 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 74c4229..b66a84b 100644
--- a/README.md
+++ b/README.md
@@ -109,6 +109,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
 OCO_AI_PROVIDER=
 OCO_API_KEY= // or other LLM provider API token
 OCO_API_URL=
+OCO_API_CUSTOM_HEADERS=
 OCO_TOKENS_MAX_INPUT=
 OCO_TOKENS_MAX_OUTPUT=
 OCO_DESCRIPTION=
diff --git a/src/commands/config.ts b/src/commands/config.ts
index 7e30cd5..e2dcb4f 100644
--- a/src/commands/config.ts
+++ b/src/commands/config.ts
@@ -25,6 +25,7 @@ export enum CONFIG_KEYS {
   OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
   OCO_TEST_MOCK_TYPE = 'OCO_TEST_MOCK_TYPE',
   OCO_API_URL = 'OCO_API_URL',
+  OCO_API_CUSTOM_HEADERS = 'OCO_API_CUSTOM_HEADERS',
   OCO_OMIT_SCOPE = 'OCO_OMIT_SCOPE',
   OCO_GITPUSH = 'OCO_GITPUSH' // todo: deprecate
 }
@@ -204,6 +205,22 @@ export const configValidators = {
     return value;
   },
 
+  [CONFIG_KEYS.OCO_API_CUSTOM_HEADERS](value) {
+    try {
+      // Custom headers must be a valid JSON string
+      if (typeof value === 'string') {
+        JSON.parse(value);
+      }
+      return value;
+    } catch (error) {
+      validateConfig(
+        CONFIG_KEYS.OCO_API_CUSTOM_HEADERS,
+        false,
+        'Must be a valid JSON string of headers'
+      );
+    }
+  },
+
   [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT](value: any) {
     value = parseInt(value);
     validateConfig(
@@ -380,6 +397,7 @@ export type ConfigType = {
   [CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
   [CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
   [CONFIG_KEYS.OCO_API_URL]?: string;
+  [CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
   [CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
   [CONFIG_KEYS.OCO_EMOJI]: boolean;
   [CONFIG_KEYS.OCO_WHY]: boolean;
@@ -462,6 +480,7 @@ const getEnvConfig = (envPath: string) => {
     OCO_MODEL: process.env.OCO_MODEL,
     OCO_API_URL: process.env.OCO_API_URL,
     OCO_API_KEY: process.env.OCO_API_KEY,
+    OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
     OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
 
     OCO_TOKENS_MAX_INPUT: parseConfigVarValue(process.env.OCO_TOKENS_MAX_INPUT),
diff --git a/src/engine/Engine.ts b/src/engine/Engine.ts
index 1956227..c5bd2e4 100644
--- a/src/engine/Engine.ts
+++ b/src/engine/Engine.ts
@@ -11,6 +11,7 @@ export interface AiEngineConfig {
   maxTokensOutput: number;
   maxTokensInput: number;
   baseURL?: string;
+  customHeaders?: Record<string, string>;
 }
 
 type Client =
diff --git a/src/engine/ollama.ts b/src/engine/ollama.ts
index 2d21d63..7d0355b 100644
--- a/src/engine/ollama.ts
+++ b/src/engine/ollama.ts
@@ -11,11 +11,18 @@ export class OllamaEngine implements AiEngine {
 
   constructor(config) {
     this.config = config;
+
+    // Combine base headers with custom headers
+    const headers = {
+      'Content-Type': 'application/json',
+      ...config.customHeaders
+    };
+
     this.client = axios.create({
       url: config.baseURL
         ? `${config.baseURL}/${config.apiKey}`
         : 'http://localhost:11434/api/chat',
-      headers: { 'Content-Type': 'application/json' }
+      headers
     });
   }
 
diff --git a/src/engine/openAi.ts b/src/engine/openAi.ts
index 4e1c6a9..9f2a1c8 100644
--- a/src/engine/openAi.ts
+++ b/src/engine/openAi.ts
@@ -14,11 +14,34 @@ export class OpenAiEngine implements AiEngine {
   constructor(config: OpenAiConfig) {
     this.config = config;
 
-    if (!config.baseURL) {
-      this.client = new OpenAI({ apiKey: config.apiKey });
-    } else {
-      this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
+    // Configuration options for the OpenAI client
+    const clientOptions: any = {
+      apiKey: config.apiKey
+    };
+
+    // Add baseURL if present
+    if (config.baseURL) {
+      clientOptions.baseURL = config.baseURL;
     }
+
+    // Add custom headers if present
+    if (config.customHeaders) {
+      try {
+        let headers = config.customHeaders;
+        // If the headers are a string, try to parse them as JSON
+        if (typeof config.customHeaders === 'string') {
+          headers = JSON.parse(config.customHeaders);
+        }
+
+        if (headers && typeof headers === 'object' && Object.keys(headers).length > 0) {
+          clientOptions.defaultHeaders = headers;
+        }
+      } catch (error) {
+        // Silently ignore parsing errors
+      }
+    }
+
+    this.client = new OpenAI(clientOptions);
   }
 
   public generateCommitMessage = async (
@@ -42,7 +65,7 @@ export class OpenAiEngine implements AiEngine {
         this.config.maxTokensInput - this.config.maxTokensOutput
       )
         throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
-
+
       const completion = await this.client.chat.completions.create(params);
 
       const message = completion.choices[0].message;
diff --git a/src/utils/engine.ts b/src/utils/engine.ts
index 3137a05..e4a9cea 100644
--- a/src/utils/engine.ts
+++ b/src/utils/engine.ts
@@ -16,12 +16,29 @@ export function getEngine(): AiEngine {
   const config = getConfig();
   const provider = config.OCO_AI_PROVIDER;
 
+  // Parse custom headers if provided
+  let customHeaders = {};
+  if (config.OCO_API_CUSTOM_HEADERS) {
+    try {
+      // If it's already an object, no need to parse it
+      if (typeof config.OCO_API_CUSTOM_HEADERS === 'object' && !Array.isArray(config.OCO_API_CUSTOM_HEADERS)) {
+        customHeaders = config.OCO_API_CUSTOM_HEADERS;
+      } else {
+        // Try to parse as JSON
+        customHeaders = JSON.parse(config.OCO_API_CUSTOM_HEADERS);
+      }
+    } catch (error) {
+      console.warn('Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers');
+    }
+  }
+
   const DEFAULT_CONFIG = {
     model: config.OCO_MODEL!,
     maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
     maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
     baseURL: config.OCO_API_URL!,
-    apiKey: config.OCO_API_KEY!
+    apiKey: config.OCO_API_KEY!,
+    customHeaders // Add custom headers to the configuration
   };
 
   switch (provider) {
diff --git a/test/unit/config.test.ts b/test/unit/config.test.ts
index 89ffc7e..871655d 100644
--- a/test/unit/config.test.ts
+++ b/test/unit/config.test.ts
@@ -122,6 +122,30 @@ describe('config', () => {
     expect(config.OCO_ONE_LINE_COMMIT).toEqual(false);
     expect(config.OCO_OMIT_SCOPE).toEqual(true);
   });
+
+  it('should handle custom HTTP headers correctly', async () => {
+    globalConfigFile = await generateConfig('.opencommit', {
+      OCO_API_CUSTOM_HEADERS: '{"X-Global-Header": "global-value"}'
+    });
+
+    envConfigFile = await generateConfig('.env', {
+      OCO_API_CUSTOM_HEADERS: '{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
+    });
+
+    const config = getConfig({
+      globalPath: globalConfigFile.filePath,
+      envPath: envConfigFile.filePath
+    });
+
+    expect(config).not.toEqual(null);
+    expect(config.OCO_API_CUSTOM_HEADERS).toEqual('{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}');
+
+    // Verify that the JSON can be parsed correctly
+    const parsedHeaders = JSON.parse(config.OCO_API_CUSTOM_HEADERS);
+    expect(parsedHeaders).toHaveProperty('Authorization', 'Bearer token123');
+    expect(parsedHeaders).toHaveProperty('X-Custom-Header', 'test-value');
+    expect(parsedHeaders).not.toHaveProperty('X-Global-Header');
+  });
 
   it('should handle empty local config correctly', async () => {
     globalConfigFile = await generateConfig('.opencommit', {
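
Reviewer note (not part of the patch): below is a minimal, standalone TypeScript sketch of the header-parsing behavior this diff adds in src/utils/engine.ts and of how the resulting object reaches the engine clients. The helper name parseCustomHeaders and the sample header values are illustrative assumptions, not code from the repository.

// Hypothetical standalone sketch: mirrors the patch logic, not copied from the repo.
function parseCustomHeaders(raw: unknown): Record<string, string> {
  if (!raw) return {};
  try {
    // Already an object (e.g. set programmatically): use it as-is.
    if (typeof raw === 'object' && !Array.isArray(raw)) {
      return raw as Record<string, string>;
    }
    // Otherwise expect a JSON string, as stored in .env or the global config.
    if (typeof raw === 'string') {
      return JSON.parse(raw);
    }
  } catch (error) {
    console.warn('Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers');
  }
  return {};
}

// Example value as it might appear in .env (assumed):
// OCO_API_CUSTOM_HEADERS={"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}
const customHeaders = parseCustomHeaders(
  '{"Authorization": "Bearer token123", "X-Custom-Header": "test-value"}'
);

// Ollama engine: merged into the axios client headers alongside Content-Type.
const ollamaHeaders = { 'Content-Type': 'application/json', ...customHeaders };

// OpenAI engine: passed as defaultHeaders in the client options.
const openAiClientOptions = { apiKey: '<api key>', defaultHeaders: customHeaders };

console.log(ollamaHeaders, openAiClientOptions);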