fix(cli): tighten proxy and setup behavior

Support explicit proxy disabling and ambient proxy fallback without leaking env state into config.

Improve first-run detection, endpoint-specific error messaging, diff exclusions, and runtime helper boundaries covered by unit tests.
This commit is contained in:
di-sukharev
2026-04-10 15:16:11 +03:00
parent 7fa2384761
commit cf27085ac9
23 changed files with 527 additions and 214 deletions

View File

@@ -8,7 +8,7 @@ import { commitlintConfigCommand } from './commands/commitlint';
import { configCommand, getConfig } from './commands/config';
import { hookCommand, isHookCalled } from './commands/githook.js';
import { prepareCommitMessageHook } from './commands/prepare-commit-msg-hook';
import { setupProxy } from './utils/proxy';
import { resolveProxy, setupProxy } from './utils/proxy';
import {
setupCommand,
isFirstRun,
@@ -20,7 +20,7 @@ import { checkIsLatestVersion } from './utils/checkIsLatestVersion';
import { runMigrations } from './migrations/_run.js';
const config = getConfig();
setupProxy(config.OCO_PROXY);
setupProxy(resolveProxy(config.OCO_PROXY));
const OCO_FLAGS_WITH_VALUE = new Set(['-c', '--context']);
const OCO_BOOLEAN_FLAGS = new Set(['-y', '--yes', '--fgm']);

View File

@@ -249,7 +249,9 @@ ${chalk.grey('——————————————————')}`
const errorConfig = getConfig();
const provider = errorConfig.OCO_AI_PROVIDER || 'openai';
const formatted = formatUserFriendlyError(error, provider);
const formatted = formatUserFriendlyError(error, provider, {
baseURL: errorConfig.OCO_API_URL
});
outro(printFormattedError(formatted));
process.exit(1);

View File

@@ -723,7 +723,8 @@ export const configValidators = {
[CONFIG_KEYS.OCO_API_URL](value: any) {
validateConfig(
CONFIG_KEYS.OCO_API_URL,
typeof value === 'string',
typeof value === 'string' &&
/^(https?:\/\/)/.test(value),
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
);
return value;
@@ -732,7 +733,8 @@ export const configValidators = {
[CONFIG_KEYS.OCO_PROXY](value: any) {
validateConfig(
CONFIG_KEYS.OCO_PROXY,
typeof value === 'string',
value === null ||
(typeof value === 'string' && /^(https?:\/\/)/.test(value)),
`${value} is not a valid URL. It should start with 'http://' or 'https://'.`
);
return value;
@@ -900,7 +902,7 @@ export type ConfigType = {
[CONFIG_KEYS.OCO_TOKENS_MAX_INPUT]: number;
[CONFIG_KEYS.OCO_TOKENS_MAX_OUTPUT]: number;
[CONFIG_KEYS.OCO_API_URL]?: string;
[CONFIG_KEYS.OCO_PROXY]?: string;
[CONFIG_KEYS.OCO_PROXY]?: string | null;
[CONFIG_KEYS.OCO_API_CUSTOM_HEADERS]?: string;
[CONFIG_KEYS.OCO_DESCRIPTION]: boolean;
[CONFIG_KEYS.OCO_EMOJI]: boolean;
@@ -986,10 +988,7 @@ const getEnvConfig = (envPath: string) => {
return {
OCO_MODEL: process.env.OCO_MODEL,
OCO_API_URL: process.env.OCO_API_URL,
OCO_PROXY:
process.env.OCO_PROXY ||
process.env.HTTPS_PROXY ||
process.env.HTTP_PROXY,
OCO_PROXY: process.env.OCO_PROXY,
OCO_API_KEY: process.env.OCO_API_KEY,
OCO_API_CUSTOM_HEADERS: process.env.OCO_API_CUSTOM_HEADERS,
OCO_AI_PROVIDER: process.env.OCO_AI_PROVIDER as OCO_AI_PROVIDER_ENUM,
@@ -1027,16 +1026,13 @@ export const getIsGlobalConfigFileExist = (
};
export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
let globalConfig: ConfigType;
const isGlobalConfigFileExist = getIsGlobalConfigFileExist(configPath);
if (!isGlobalConfigFileExist) globalConfig = initGlobalConfig(configPath);
else {
const configFile = readFileSync(configPath, 'utf8');
globalConfig = iniParse(configFile) as ConfigType;
if (!isGlobalConfigFileExist) {
return { ...DEFAULT_CONFIG };
}
return globalConfig;
const configFile = readFileSync(configPath, 'utf8');
return iniParse(configFile) as ConfigType;
};
/**
@@ -1049,7 +1045,10 @@ export const getGlobalConfig = (configPath: string = defaultConfigPath) => {
/**
 * Merge a partial (main) config over a fallback config.
 * A key falls back only when the main value is strictly `undefined`, so an
 * explicit `null` override (e.g. OCO_PROXY=null to disable proxying) survives
 * the merge — `??` would silently discard it.
 */
const mergeConfigs = (main: Partial<ConfigType>, fallback: ConfigType) => {
  const allKeys = new Set([...Object.keys(main), ...Object.keys(fallback)]);
  return Array.from(allKeys).reduce((acc, key) => {
    const mainValue = main[key];
    acc[key] = parseConfigVarValue(
      mainValue !== undefined ? mainValue : fallback[key]
    );
    return acc;
  }, {} as ConfigType);
};
@@ -1218,7 +1217,10 @@ function getConfigKeyDetails(key) {
case CONFIG_KEYS.OCO_PROXY:
return {
description: 'HTTP/HTTPS Proxy URL',
values: ["URL string (must start with 'http://' or 'https://')"]
values: [
"URL string (must start with 'http://' or 'https://')",
'null (disable proxy even when HTTP_PROXY/HTTPS_PROXY are set)'
]
};
case CONFIG_KEYS.OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
return {

View File

@@ -427,22 +427,23 @@ export async function runSetup(): Promise<boolean> {
}
/**
 * Decide whether the full setup wizard should run.
 * True only when there is no global config file AND no usable provider
 * configuration: local providers (Ollama/MLX) need a model, every other
 * provider needs an API key. The 'test' provider never triggers setup.
 */
export function isFirstRun(): boolean {
  const hasGlobalConfig = getIsGlobalConfigFileExist();
  const config = getConfig();
  const provider = config.OCO_AI_PROVIDER || OCO_AI_PROVIDER_ENUM.OPENAI;
  if (provider === OCO_AI_PROVIDER_ENUM.TEST) {
    return false;
  }
  const hasRequiredConfig = MODEL_REQUIRED_PROVIDERS.includes(
    provider as OCO_AI_PROVIDER_ENUM
  )
    ? Boolean(config.OCO_MODEL)
    : Boolean(config.OCO_API_KEY);
  // Trigger the full setup wizard only when nothing usable was configured yet.
  return !hasGlobalConfig && !hasRequiredConfig;
}
export async function promptForMissingApiKey(): Promise<boolean> {

View File

@@ -11,7 +11,7 @@ export interface AiEngineConfig {
maxTokensOutput: number;
maxTokensInput: number;
baseURL?: string;
proxy?: string;
proxy?: string | null;
customHeaders?: Record<string, string>;
ollamaThink?: boolean;
}

View File

@@ -5,8 +5,8 @@ import {
MessageParam
} from '@anthropic-ai/sdk/resources/messages.mjs';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -21,8 +21,7 @@ export class AnthropicEngine implements AiEngine {
this.config = config;
const clientOptions: any = { apiKey: this.config.apiKey };
const proxy =
config.proxy || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
const proxy = config.proxy;
if (proxy) {
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
}

View File

@@ -3,8 +3,8 @@ import {
OpenAIClient as AzureOpenAIClient
} from '@azure/openai';
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';

View File

@@ -1,6 +1,6 @@
import { OpenAI } from 'openai';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { OpenAiEngine, OpenAiConfig } from './openAi';

View File

@@ -1,7 +1,8 @@
import { OpenAI } from 'openai';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { createRequire } from 'module';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -10,8 +11,14 @@ import { AiEngine, AiEngineConfig } from './Engine';
export interface MistralAiConfig extends AiEngineConfig {}
export type MistralCompletionMessageParam = Array<any>;
// Import Mistral dynamically to avoid TS errors
// eslint-disable-next-line @typescript-eslint/no-var-requires
// Resolve a CommonJS `require` that works in both module systems:
// under CJS, `__filename` is defined and `createRequire(__filename)` succeeds;
// under ESM it throws (ReferenceError), so fall back to `import.meta.url`.
let require: NodeRequire;
try {
require = createRequire(__filename);
} catch {
require = createRequire(import.meta.url);
}
// Load the Mistral SDK through `require` (not a static import) to sidestep
// TS/ESM interop type errors with the package — see comment above.
const Mistral = require('@mistralai/mistralai').Mistral;
export class MistralAiEngine implements AiEngine {

View File

@@ -1,8 +1,8 @@
import { OpenAI } from 'openai';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
import { parseCustomHeaders } from '../utils/engine';
import { parseCustomHeaders } from '../utils/customHeaders';
import { normalizeEngineError } from '../utils/engineErrorHandler';
import { GenerateCommitMessageErrorEnum } from '../utils/generateCommitMessageErrors';
import { removeContentTags } from '../utils/removeContentTags';
import { tokenCount } from '../utils/tokenCount';
import { AiEngine, AiEngineConfig } from './Engine';
@@ -24,8 +24,7 @@ export class OpenAiEngine implements AiEngine {
clientOptions.baseURL = config.baseURL;
}
const proxy =
config.proxy || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
const proxy = config.proxy;
if (proxy) {
clientOptions.httpAgent = new HttpsProxyAgent(proxy);
}

View File

@@ -16,6 +16,7 @@ import {
getSuggestedModels,
ModelNotFoundError
} from './utils/errors';
import { GenerateCommitMessageErrorEnum } from './utils/generateCommitMessageErrors';
import { mergeDiffs } from './utils/mergeDiffs';
import { tokenCount } from './utils/tokenCount';
@@ -43,13 +44,6 @@ const generateCommitMessageChatCompletionPrompt = async (
return chatContextAsCompletionRequest;
};
export enum GenerateCommitMessageErrorEnum {
tooMuchTokens = 'TOO_MUCH_TOKENS',
internalError = 'INTERNAL_ERROR',
emptyMessage = 'EMPTY_MESSAGE',
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
}
async function handleModelNotFoundError(
error: Error,
provider: string,

View File

@@ -0,0 +1,21 @@
/**
 * Parse the OCO_API_CUSTOM_HEADERS setting into a header map.
 *
 * Accepts either an already-parsed plain object or a JSON string. Anything
 * else — invalid JSON, JSON arrays, or JSON scalars (which JSON.parse can
 * legally produce) — is rejected with a warning and an empty map, so callers
 * never receive a non-object where headers are expected.
 */
export function parseCustomHeaders(headers: any): Record<string, string> {
  if (!headers) {
    return {};
  }
  try {
    const parsed =
      typeof headers === 'object' ? headers : JSON.parse(headers);
    // Only accept plain objects; arrays and scalars are not header maps.
    if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
      return parsed;
    }
  } catch {
    // fall through to the warning below
  }
  console.warn(
    'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
  );
  return {};
}

View File

@@ -13,41 +13,22 @@ import { MLXEngine } from '../engine/mlx';
import { DeepseekEngine } from '../engine/deepseek';
import { AimlApiEngine } from '../engine/aimlapi';
import { OpenRouterEngine } from '../engine/openrouter';
export function parseCustomHeaders(headers: any): Record<string, string> {
let parsedHeaders = {};
if (!headers) {
return parsedHeaders;
}
try {
if (typeof headers === 'object' && !Array.isArray(headers)) {
parsedHeaders = headers;
} else {
parsedHeaders = JSON.parse(headers);
}
} catch (error) {
console.warn(
'Invalid OCO_API_CUSTOM_HEADERS format, ignoring custom headers'
);
}
return parsedHeaders;
}
import { parseCustomHeaders } from './customHeaders';
import { resolveProxy } from './proxy';
export function getEngine(): AiEngine {
const config = getConfig();
const provider = config.OCO_AI_PROVIDER;
const customHeaders = parseCustomHeaders(config.OCO_API_CUSTOM_HEADERS);
const resolvedProxy = resolveProxy(config.OCO_PROXY);
const DEFAULT_CONFIG = {
model: config.OCO_MODEL!,
maxTokensOutput: config.OCO_TOKENS_MAX_OUTPUT!,
maxTokensInput: config.OCO_TOKENS_MAX_INPUT!,
baseURL: config.OCO_API_URL!,
proxy: config.OCO_PROXY!,
proxy: resolvedProxy,
apiKey: config.OCO_API_KEY!,
customHeaders
};

View File

@@ -349,10 +349,44 @@ export interface FormattedError {
suggestion: string | null;
}
// Extra context callers may pass to tailor error messages to their setup.
export interface ErrorFormattingContext {
  baseURL?: string;
}

/**
 * Extract a short endpoint label (the URL host, e.g. "api.example.com")
 * from a configured base URL. Returns null when the URL is absent or
 * cannot be parsed.
 */
function getCustomEndpointLabel(baseURL?: string): string | null {
  if (!baseURL) {
    return null;
  }
  let host: string | null;
  try {
    host = new URL(baseURL).host;
  } catch {
    host = null;
  }
  return host;
}

/**
 * Build a "service unavailable" message. When a custom endpoint is
 * configured, blame that endpoint (by host when parseable) rather than the
 * provider, so users of self-hosted/proxy endpoints get accurate guidance.
 */
function getServiceUnavailableMessage(
  provider: string,
  context?: ErrorFormattingContext
): string {
  const baseURL = context?.baseURL;
  const endpointLabel = getCustomEndpointLabel(baseURL);
  if (endpointLabel) {
    return `The configured API endpoint (${endpointLabel}) is temporarily unavailable.`;
  }
  return baseURL
    ? 'The configured API endpoint is temporarily unavailable.'
    : `The ${provider} service is temporarily unavailable.`;
}
// Format an error into a user-friendly structure
export function formatUserFriendlyError(
error: unknown,
provider: string
provider: string,
context?: ErrorFormattingContext
): FormattedError {
const billingUrl = PROVIDER_BILLING_URLS[provider] || null;
@@ -381,7 +415,7 @@ export function formatUserFriendlyError(
if (error instanceof ServiceUnavailableError) {
return {
title: 'Service Unavailable',
message: `The ${provider} service is temporarily unavailable.`,
message: getServiceUnavailableMessage(provider, context),
helpUrl: null,
suggestion: 'Please try again in a few moments.'
};
@@ -427,7 +461,7 @@ export function formatUserFriendlyError(
if (isServiceUnavailableError(error)) {
return {
title: 'Service Unavailable',
message: `The ${provider} service is temporarily unavailable.`,
message: getServiceUnavailableMessage(provider, context),
helpUrl: null,
suggestion: 'Please try again in a few moments.'
};

View File

@@ -0,0 +1,8 @@
import { DEFAULT_TOKEN_LIMITS } from '../commands/config';
// Error identifiers raised while generating commit messages. Extracted from
// generateCommitMessageFromGitDiff into this standalone module so engine
// implementations can import it directly (see the engine import changes in
// this file's siblings).
export enum GenerateCommitMessageErrorEnum {
// Staged diff exceeds the model's input-token budget.
tooMuchTokens = 'TOO_MUCH_TOKENS',
// Unexpected failure inside the generation pipeline.
internalError = 'INTERNAL_ERROR',
// Model returned an empty completion.
emptyMessage = 'EMPTY_MESSAGE',
// NOTE(review): template-literal member — the value is baked in from
// DEFAULT_TOKEN_LIMITS at compile time and doubles as the user-facing text.
outputTokensTooHigh = `Token limit exceeded, OCO_TOKENS_MAX_OUTPUT must not be much higher than the default ${DEFAULT_TOKEN_LIMITS.DEFAULT_MAX_TOKENS_OUTPUT} tokens.`
}

View File

@@ -93,36 +93,34 @@ export const gitAdd = async ({ files }: { files: string[] }) => {
gitAddSpinner.stop(`Staged ${files.length} files`);
};
// Binary/image extensions whose diffs are noise for commit-message generation.
const BINARY_DIFF_EXTENSIONS = ['.svg', '.png', '.jpg', '.jpeg', '.webp', '.gif'];

/**
 * True for files excluded from `git diff` by default: lockfiles
 * (yarn.lock, package-lock.json, …) and binary image formats.
 * Extensions are matched with `endsWith` (case-insensitively), so a source
 * file like `icon.svg.tsx` is no longer wrongly excluded as `.svg` was
 * previously via a substring check.
 */
const isFileExcludedFromDiff = (file: string) => {
  const lower = file.toLowerCase();
  return (
    lower.includes('.lock') ||
    lower.includes('-lock.') ||
    BINARY_DIFF_EXTENSIONS.some((ext) => lower.endsWith(ext))
  );
};
export const getDiff = async ({ files }: { files: string[] }) => {
const gitDir = await getGitDir();
const lockFiles = files.filter(
(file) =>
file.includes('.lock') ||
file.includes('-lock.') ||
file.includes('.svg') ||
file.includes('.png') ||
file.includes('.jpg') ||
file.includes('.jpeg') ||
file.includes('.webp') ||
file.includes('.gif')
);
const excludedFiles = files.filter(isFileExcludedFromDiff);
if (lockFiles.length) {
if (excludedFiles.length) {
outro(
`Some files are excluded by default from 'git diff'. No commit messages are generated for this files:\n${lockFiles.join(
`Some files are excluded by default from 'git diff'. No commit messages are generated for this files:\n${excludedFiles.join(
'\n'
)}`
);
}
const filesWithoutLocks = files.filter(
(file) => !file.includes('.lock') && !file.includes('-lock.')
);
const diffableFiles = files.filter((file) => !isFileExcludedFromDiff(file));
const { stdout: diff } = await execa(
'git',
['diff', '--staged', '--', ...filesWithoutLocks],
['diff', '--staged', '--', ...diffableFiles],
{ cwd: gitDir }
);

View File

@@ -1,21 +1,56 @@
import { setGlobalDispatcher, ProxyAgent } from 'undici';
import axios from 'axios';
import { HttpsProxyAgent } from 'https-proxy-agent';
import {
Agent,
ProxyAgent,
setGlobalDispatcher
} from 'undici';
export function setupProxy(proxyUrl?: string) {
const proxy = proxyUrl || process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
if (proxy) {
try {
// Set global dispatcher for undici (affects globalThis.fetch used by Gemini and others)
const dispatcher = new ProxyAgent(proxy);
setGlobalDispatcher(dispatcher);
export type ProxySetting = string | null | undefined;
// Set axios global agent
const agent = new HttpsProxyAgent(proxy);
axios.defaults.httpsAgent = agent;
axios.defaults.proxy = false; // Disable axios built-in proxy handling to use agent
} catch (error) {
console.warn(`[Proxy Error] Failed to set proxy: ${error.message}`);
export function resolveProxy(proxySetting?: ProxySetting): ProxySetting {
if (proxySetting === null) {
return null;
}
if (typeof proxySetting === 'string' && proxySetting.trim().length > 0) {
return proxySetting;
}
return process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
}
// Clear any previously-installed proxy agents so repeated setupProxy calls do
// not leak state between runs. When `disableEnvProxy` is true, axios' own
// HTTP(S)_PROXY environment handling is also switched off (explicit opt-out).
function resetProxySetup(disableEnvProxy: boolean) {
setGlobalDispatcher(new Agent());
axios.defaults.httpAgent = undefined;
axios.defaults.httpsAgent = undefined;
axios.defaults.proxy = disableEnvProxy ? false : undefined;
}
// Install (or remove) the process-wide proxy for undici/fetch and axios.
// `null` disables proxying even when HTTP(S)_PROXY env vars are set;
// undefined/'' leaves direct connections with default env behavior.
export function setupProxy(proxySetting?: ProxySetting) {
try {
if (proxySetting === null) {
// Explicit opt-out: direct connections, ignoring proxy env vars.
resetProxySetup(true);
return;
}
// Reset first so a previously-configured proxy never lingers.
resetProxySetup(false);
if (!proxySetting) {
return;
}
// Set global dispatcher for undici (affects globalThis.fetch used by Gemini and others)
const dispatcher = new ProxyAgent(proxySetting);
setGlobalDispatcher(dispatcher);
// Set axios global agents and disable axios built-in proxy handling.
const agent = new HttpsProxyAgent(proxySetting);
axios.defaults.httpAgent = agent;
axios.defaults.httpsAgent = agent;
axios.defaults.proxy = false;
} catch (error) {
// `error` is `unknown` under strict mode; extract a printable message.
const message = error instanceof Error ? error.message : String(error);
console.warn(`[Proxy Error] Failed to set proxy: ${message}`);
}
}