Mirror of https://github.com/di-sukharev/opencommit.git (synced 2026-04-20 03:02:51 -04:00).
Add OCO_API_CUSTOM_HEADERS variable to README, config enum, and env parsing to allow JSON string of custom headers. Validate that custom headers are valid JSON in config validator. Extend AiEngineConfig with customHeaders and pass headers to OllamaEngine and OpenAiEngine clients when creating requests. Parse custom headers in utils/engine and warn on invalid format. Add unit tests to ensure OCO_API_CUSTOM_HEADERS is handled correctly and merged from env over global config. This enables users to send additional headers such as Authorization or tracing headers with LLM API calls.
32 lines
898 B
TypeScript
import AnthropicClient from '@anthropic-ai/sdk';
|
|
import { OpenAIClient as AzureOpenAIClient } from '@azure/openai';
|
|
import { GoogleGenerativeAI as GeminiClient } from '@google/generative-ai';
|
|
import { AxiosInstance as RawAxiosClient } from 'axios';
|
|
import { OpenAI as OpenAIClient } from 'openai';
|
|
import { Mistral as MistralClient } from '@mistralai/mistralai';
|
|
|
|
export interface AiEngineConfig {
|
|
apiKey: string;
|
|
model: string;
|
|
maxTokensOutput: number;
|
|
maxTokensInput: number;
|
|
baseURL?: string;
|
|
customHeaders?: Record<string, string>;
|
|
}
|
|
|
|
type Client =
|
|
| OpenAIClient
|
|
| AzureOpenAIClient
|
|
| AnthropicClient
|
|
| RawAxiosClient
|
|
| GeminiClient
|
|
| MistralClient;
|
|
|
|
export interface AiEngine {
|
|
config: AiEngineConfig;
|
|
client: Client;
|
|
generateCommitMessage(
|
|
messages: Array<OpenAIClient.Chat.Completions.ChatCompletionMessageParam>
|
|
): Promise<string | null | undefined>;
|
|
}
|