feat(proxy): add universal proxy support and fix Gemini model resolution (#536)

Integrated undici ProxyAgent for native fetch and HttpsProxyAgent for axios/openai/anthropic. Upgraded @google/generative-ai to fix #536. Added OCO_PROXY config.

Co-authored-by: uni <uni@hanwei.ink>
This commit is contained in:
sky
2026-03-28 16:14:49 +00:00
committed by uni
parent f51393e37a
commit e27007b6fe
16 changed files with 123 additions and 158893 deletions

View File

@@ -11,6 +11,7 @@ export interface AiEngineConfig {
// Hard cap on tokens the model may generate in its reply.
maxTokensOutput: number;
// Hard cap on tokens accepted in the prompt/context sent to the model.
maxTokensInput: number;
// Optional override for the provider's API endpoint (e.g. a self-hosted gateway).
baseURL?: string;
// Optional proxy URL routed through for outbound API calls; when unset,
// engines fall back to the HTTPS_PROXY / HTTP_PROXY environment variables
// (see the engine constructors in this changeset).
proxy?: string;
// Extra HTTP headers merged into every request to the provider.
customHeaders?: Record<string, string>;
}

View File

@@ -1,4 +1,5 @@
import AnthropicClient from '@anthropic-ai/sdk';
import { HttpsProxyAgent } from 'https-proxy-agent';
import {
MessageCreateParamsNonStreaming,
MessageParam
@@ -18,7 +19,15 @@ export class AnthropicEngine implements AiEngine {
/**
 * Builds the Anthropic client, honoring proxy configuration.
 *
 * Proxy resolution order: explicit `config.proxy`, then the conventional
 * HTTPS_PROXY / HTTP_PROXY environment variables. When a proxy is found,
 * all SDK traffic is tunnelled through it via HttpsProxyAgent.
 *
 * @param config engine configuration (apiKey, optional proxy, etc.)
 */
constructor(config) {
  this.config = config;
  const clientOptions: any = { apiKey: this.config.apiKey };
  // NOTE(review): removed a dead `new AnthropicClient({ apiKey })` assignment
  // that was immediately overwritten below — it constructed and discarded a
  // proxy-unaware client on every instantiation.
  const proxy =
    config.proxy || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
  if (proxy) {
    // Route SDK HTTP(S) traffic through the configured proxy.
    clientOptions.httpAgent = new HttpsProxyAgent(proxy);
  }
  this.client = new AnthropicClient(clientOptions);
}
public generateCommitMessage = async (

View File

@@ -29,10 +29,15 @@ export class GeminiEngine implements AiEngine {
.map((m) => m.content)
.join('\n');
const gemini = this.client.getGenerativeModel({
model: this.config.model,
systemInstruction
});
const gemini = this.client.getGenerativeModel(
{
model: this.config.model,
systemInstruction
},
{
baseUrl: this.config.baseURL
}
);
const contents = messages
.filter((m) => m.role !== 'system')

View File

@@ -1,4 +1,5 @@
import { OpenAI } from 'openai';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { removeContentTags } from '../utils/removeContentTags';

View File

@@ -1,4 +1,5 @@
import { OpenAI } from 'openai';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { parseCustomHeaders } from '../utils/engine';
import { normalizeEngineError } from '../utils/engineErrorHandler';
@@ -23,6 +24,12 @@ export class OpenAiEngine implements AiEngine {
clientOptions.baseURL = config.baseURL;
}
const proxy =
config.proxy || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
if (proxy) {
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
}
if (config.customHeaders) {
const headers = parseCustomHeaders(config.customHeaders);
if (Object.keys(headers).length > 0) {