add custom HTTP headers support via OCO_API_CUSTOM_HEADERS

Add the OCO_API_CUSTOM_HEADERS variable to the README, the config enum,
and env parsing so users can supply custom headers as a JSON string.
Validate in the config validator that the custom headers are valid JSON.
Extend AiEngineConfig with customHeaders and pass the headers to the
OllamaEngine and OpenAiEngine clients when creating requests.
Parse custom headers in utils/engine and warn on an invalid format.
Add unit tests to ensure OCO_API_CUSTOM_HEADERS is handled correctly
and that the env value is merged over the global config.
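
A minimal sketch of the validation step described above (the helper name
validateCustomHeaders and its exact placement are illustrative, not part
of this commit):

const validateCustomHeaders = (value: unknown): Record<string, string> | undefined => {
  // Unset means no extra headers
  if (!value) return undefined;
  // A value read from an already-parsed config may be an object, not a string
  if (typeof value === 'object' && !Array.isArray(value)) {
    return value as Record<string, string>;
  }
  let parsed: unknown;
  try {
    parsed = JSON.parse(String(value));
  } catch {
    throw new Error('OCO_API_CUSTOM_HEADERS must be valid JSON');
  }
  if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
    throw new Error('OCO_API_CUSTOM_HEADERS must be a JSON object of header name/value pairs');
  }
  return parsed as Record<string, string>;
};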

This enables users to send additional headers such as
Authorization or tracing headers with LLM API calls.
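
For example (illustrative value only, assuming the variable is set via the
environment or the oco config file):

// An example OCO_API_CUSTOM_HEADERS value and the headers it parses to
const OCO_API_CUSTOM_HEADERS = '{"Authorization": "Bearer <token>", "X-Request-Id": "abc123"}';
const headers: Record<string, string> = JSON.parse(OCO_API_CUSTOM_HEADERS);
// headers now carries Authorization and X-Request-Id on every LLM API call
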
EmilienMottet
2025-04-29 20:51:24 +02:00
parent 25c6a0d5d4
commit 6c48c935e2
7 changed files with 99 additions and 7 deletions

src/utils/engine.ts

@@ -16,12 +16,29 @@ export function getEngine(): AiEngine {
   const config = getConfig();
   const provider = config.OCO_AI_PROVIDER;
 
+  // Parse custom headers if provided
+  let customHeaders = {};
+  if (config.OCO_API_CUSTOM_HEADERS) {
+    try {
+      // If it's already an object, no need to parse it
+      if (typeof config.OCO_API_CUSTOM_HEADERS === 'object' && !Array.isArray(config.OCO_API_CUSTOM_HEADERS)) {
+        customHeaders = config.OCO_API_CUSTOM_HEADERS;
+      } else {
+        // Try to parse as JSON
+        customHeaders = JSON.parse(config.OCO_API_CUSTOM_HEADERS);
+      }
+    } catch (error) {
+      console.warn('Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers');
+    }
+  }
+
   const DEFAULT_CONFIG = {
     model: config.OCO_MODEL!,
     maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
     maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
     baseURL: config.OCO_API_URL!,
-    apiKey: config.OCO_API_KEY!
+    apiKey: config.OCO_API_KEY!,
+    customHeaders // Add custom headers to the configuration
   };
 
   switch (provider) {
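
The OpenAiEngine and OllamaEngine changes live in other files of this commit
and are not shown here. As a rough sketch of the idea, assuming the OpenAI
Node SDK (whose client accepts a defaultHeaders option), the parsed headers
would be handed to the HTTP client roughly like this:

import OpenAI from 'openai';

interface AiEngineConfig {
  model: string;
  maxTokensOutput: number;
  maxTokensInput: number;
  baseURL?: string;
  apiKey: string;
  customHeaders?: Record<string, string>; // new optional field
}

// Sketch only: forward the custom headers to the client so every
// request carries them (e.g. Authorization or tracing headers).
class OpenAiEngine {
  private client: OpenAI;

  constructor(private config: AiEngineConfig) {
    this.client = new OpenAI({
      apiKey: config.apiKey,
      baseURL: config.baseURL,
      defaultHeaders: config.customHeaders
    });
  }
}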