Consolidation

Siddharth Ganesan
2026-02-10 13:13:42 -08:00
parent 81bf1e1650
commit 8dbec38a24
10 changed files with 63 additions and 199 deletions

View File

@@ -9,7 +9,6 @@ import { buildConversationHistory } from '@/lib/copilot/chat-context'
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import {
@@ -24,7 +23,6 @@ import {
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { env } from '@/lib/core/config/env'
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
const logger = createLogger('CopilotChatAPI')
@@ -43,7 +41,7 @@ const ChatMessageSchema = z.object({
chatId: z.string().optional(),
workflowId: z.string().optional(),
workflowName: z.string().optional(),
model: z.string().optional().default('claude-4.6-opus'),
model: z.string().optional().default('claude-opus-4-6'),
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
prefetch: z.boolean().optional(),
createNewChat: z.boolean().optional().default(false),
@@ -173,14 +171,14 @@ export async function POST(req: NextRequest) {
let currentChat: any = null
let conversationHistory: any[] = []
let actualChatId = chatId
const selectedModel = model || 'claude-opus-4-6'
if (chatId || createNewChat) {
const defaultsForChatRow = getCopilotModel('chat')
const chatResult = await resolveOrCreateChat({
chatId,
userId: authenticatedUserId,
workflowId,
model: defaultsForChatRow.model,
model: selectedModel,
})
currentChat = chatResult.chat
actualChatId = chatResult.chatId || chatId
@@ -191,8 +189,6 @@ export async function POST(req: NextRequest) {
conversationHistory = history.history
}
const defaults = getCopilotModel('chat')
const selectedModel = model || defaults.model
const effectiveMode = mode === 'agent' ? 'build' : mode
const effectiveConversationId =
(currentChat?.conversationId as string | undefined) || conversationId
@@ -284,7 +280,7 @@ export async function POST(req: NextRequest) {
}
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
generateChatTitle(message)
generateChatTitle({ message, model: selectedModel, provider })
.then(async (title) => {
if (title) {
await db
@@ -373,10 +369,7 @@ export async function POST(req: NextRequest) {
content: nonStreamingResult.content,
toolCalls: nonStreamingResult.toolCalls,
model: selectedModel,
provider:
(requestPayload?.provider as Record<string, unknown>)?.provider ||
env.COPILOT_PROVIDER ||
'openai',
provider: typeof requestPayload?.provider === 'string' ? requestPayload.provider : undefined,
}
logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, {
@@ -414,7 +407,7 @@ export async function POST(req: NextRequest) {
// Start title generation in parallel if this is first message (non-streaming)
if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
generateChatTitle(message)
generateChatTitle({ message, model: selectedModel, provider })
.then(async (title) => {
if (title) {
await db
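Taken together, these hunks move model selection into a single `selectedModel` value that is reused for chat creation, title generation, and the non-streaming response metadata. A minimal sketch of the consolidated flow; `resolveOrCreateChat` and `generateChatTitle` follow the signatures shown above, while the wrapper function and its parameter shape are illustrative only:

```ts
// Minimal sketch of the consolidated model/provider handling (illustrative wrapper).
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { generateChatTitle } from '@/lib/copilot/chat-title'

async function handleCopilotChat(params: {
  userId: string
  message: string
  model?: string
  chatId?: string
  workflowId?: string
  provider?: string
}) {
  // One source of truth for the model: the client's choice, else the new default.
  const selectedModel = params.model || 'claude-opus-4-6'

  // The chat row is created with the selected model instead of getCopilotModel('chat').
  const { chat, chatId } = await resolveOrCreateChat({
    chatId: params.chatId,
    userId: params.userId,
    workflowId: params.workflowId,
    model: selectedModel,
  })

  // Title generation receives the same model/provider pair as the chat request.
  if (chatId && !chat?.title) {
    generateChatTitle({ message: params.message, model: selectedModel, provider: params.provider })
      .then((title) => {
        if (title) {
          // persist the title (db update elided)
        }
      })
      .catch(() => {})
  }

  return { chat, chatId, selectedModel }
}
```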

View File

@@ -2,11 +2,27 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { env } from '@/lib/core/config/env'
import type { AvailableModel } from '@/lib/copilot/types'
import { env } from '@/lib/core/config/env'
const logger = createLogger('CopilotModelsAPI')
interface RawAvailableModel {
id: string
friendlyName?: string
displayName?: string
provider?: string
}
function isRawAvailableModel(item: unknown): item is RawAvailableModel {
return (
typeof item === 'object' &&
item !== null &&
'id' in item &&
typeof (item as { id: unknown }).id === 'string'
)
}
export async function GET(_req: NextRequest) {
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
if (!isAuthenticated || !userId) {
@@ -44,8 +60,8 @@ export async function GET(_req: NextRequest) {
const rawModels = Array.isArray(payload?.models) ? payload.models : []
const models: AvailableModel[] = rawModels
.filter((item: any) => item && typeof item.id === 'string')
.map((item: any) => ({
.filter((item: unknown): item is RawAvailableModel => isRawAvailableModel(item))
.map((item: RawAvailableModel) => ({
id: item.id,
friendlyName: item.friendlyName || item.displayName || item.id,
provider: item.provider || 'unknown',
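The `any`-typed filter/map is replaced by a reusable type guard. A quick illustration of the narrowing, reusing the `RawAvailableModel` interface and `isRawAvailableModel` guard from this hunk; the payload values are made up:

```ts
// Illustrative payload; only well-formed entries survive the guard.
const payload: { models?: unknown[] } = {
  models: [
    { id: 'claude-opus-4-6', friendlyName: 'Claude Opus 4.6', provider: 'anthropic' },
    { id: 42 },        // dropped: id is not a string
    'not-an-object',   // dropped: not an object
    null,              // dropped
  ],
}

const rawModels = Array.isArray(payload?.models) ? payload.models : []

const models: AvailableModel[] = rawModels
  .filter((item): item is RawAvailableModel => isRawAvailableModel(item))
  .map((item) => ({
    id: item.id,
    friendlyName: item.friendlyName || item.displayName || item.id,
    provider: item.provider || 'unknown',
  }))
// -> [{ id: 'claude-opus-4-6', friendlyName: 'Claude Opus 4.6', provider: 'anthropic' }]
```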

View File

@@ -9,28 +9,12 @@ import { settings } from '@/../../packages/db/schema'
const logger = createLogger('CopilotUserModelsAPI')
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
'gpt-4o': false,
'gpt-4.1': false,
'gpt-5-fast': false,
'gpt-5': true,
'gpt-5-medium': false,
'gpt-5-high': false,
'gpt-5.1-fast': false,
'gpt-5.1': false,
'gpt-5.1-medium': false,
'gpt-5.1-high': false,
'gpt-5-codex': false,
'gpt-5.1-codex': false,
'gpt-5.2': false,
'claude-opus-4-6': true,
'claude-opus-4-5': true,
'claude-sonnet-4-5': true,
'claude-haiku-4-5': true,
'gpt-5.2': true,
'gpt-5.2-codex': true,
'gpt-5.2-pro': true,
o3: true,
'claude-4-sonnet': false,
'claude-4.5-haiku': true,
'claude-4.5-sonnet': true,
'claude-4.6-opus': true,
'claude-4.5-opus': true,
'claude-4.1-opus': false,
'gemini-3-pro': true,
}
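The defaults table is rewritten around the new model IDs, with the current Anthropic, GPT-5.2, o3, and Gemini entries enabled out of the box. A hypothetical sketch of layering per-user overrides on top of these defaults; the merge helper and override values are illustrative, not the route's actual settings logic:

```ts
// Hypothetical merge of user overrides onto the new defaults.
// DEFAULT_ENABLED_MODELS is from the hunk above; everything else is illustrative.
type EnabledModelMap = Record<CopilotModelId, boolean> // CopilotModelId is now just `string`

function resolveEnabledModels(
  defaults: EnabledModelMap,
  userOverrides: Partial<EnabledModelMap> | null
): EnabledModelMap {
  return { ...defaults, ...(userOverrides ?? {}) }
}

// e.g. a user who disables the default Opus model and turns GPT-4o back on:
const enabled = resolveEnabledModels(DEFAULT_ENABLED_MODELS, {
  'claude-opus-4-6': false,
  'gpt-4o': true,
})
```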

View File

@@ -1,77 +1,57 @@
import { createLogger } from '@sim/logger'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { env } from '@/lib/core/config/env'
import { extractResponseText } from '@/providers/openai/utils'
const logger = createLogger('SimAgentUtils')
const azureApiKey = env.AZURE_OPENAI_API_KEY
const azureEndpoint = env.AZURE_OPENAI_ENDPOINT
const azureApiVersion = env.AZURE_OPENAI_API_VERSION
const chatTitleModelName = env.WAND_OPENAI_MODEL_NAME || 'gpt-4o'
const openaiApiKey = env.OPENAI_API_KEY
const useChatTitleAzure = azureApiKey && azureEndpoint && azureApiVersion
interface GenerateChatTitleParams {
message: string
model: string
provider?: string
}
/**
* Generates a short title for a chat based on the first message
* @param message First user message in the chat
* @returns A short title or null if API key is not available
* using the Copilot backend's server-side provider configuration.
*/
export async function generateChatTitle(message: string): Promise<string | null> {
if (!useChatTitleAzure && !openaiApiKey) {
export async function generateChatTitle({
message,
model,
provider,
}: GenerateChatTitleParams): Promise<string | null> {
if (!message || !model) {
return null
}
const headers: Record<string, string> = {
'Content-Type': 'application/json',
}
if (env.COPILOT_API_KEY) {
headers['x-api-key'] = env.COPILOT_API_KEY
}
try {
const apiUrl = useChatTitleAzure
? `${azureEndpoint?.replace(/\/$/, '')}/openai/v1/responses?api-version=${azureApiVersion}`
: 'https://api.openai.com/v1/responses'
const headers: Record<string, string> = {
'Content-Type': 'application/json',
'OpenAI-Beta': 'responses=v1',
}
if (useChatTitleAzure) {
headers['api-key'] = azureApiKey!
} else {
headers.Authorization = `Bearer ${openaiApiKey}`
}
const response = await fetch(apiUrl, {
const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
method: 'POST',
headers,
body: JSON.stringify({
model: useChatTitleAzure ? chatTitleModelName : 'gpt-4o',
input: [
{
role: 'system',
content:
'Generate a very short title (3-5 words max) for a chat that starts with this message. The title should be concise and descriptive. Do not wrap the title in quotes.',
},
{
role: 'user',
content: message,
},
],
max_output_tokens: 20,
temperature: 0.2,
message,
model,
...(provider ? { provider } : {}),
}),
})
const payload = await response.json().catch(() => ({}))
if (!response.ok) {
const errorText = await response.text()
logger.error('Error generating chat title:', {
logger.warn('Failed to generate chat title via copilot backend', {
status: response.status,
statusText: response.statusText,
error: errorText,
error: payload,
})
return null
}
const data = await response.json()
const title = extractResponseText(data.output)?.trim() || null
return title
const title = typeof payload?.title === 'string' ? payload.title.trim() : ''
return title || null
} catch (error) {
logger.error('Error generating chat title:', error)
return null
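With the direct Azure/OpenAI calls removed, the helper delegates to the Copilot backend's `/api/generate-chat-title` endpoint and takes an options object. A usage example with illustrative values:

```ts
// Example call with the new object signature (message, model, and provider are illustrative).
const title = await generateChatTitle({
  message: 'Help me build a workflow that syncs Notion pages to Slack',
  model: 'claude-opus-4-6',
  provider: 'anthropic', // optional; omit to let the backend pick its configured provider
})
// -> a short string such as 'Notion to Slack Sync', or null if the backend call fails
```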

View File

@@ -5,23 +5,6 @@ import type { ProviderId } from '@/providers/types'
const logger = createLogger('CopilotConfig')
/**
* Valid provider IDs for validation
*/
const VALID_PROVIDER_IDS: readonly ProviderId[] = [
'openai',
'azure-openai',
'anthropic',
'azure-anthropic',
'google',
'deepseek',
'xai',
'cerebras',
'mistral',
'groq',
'ollama',
] as const
/**
* Configuration validation constraints
*/
@@ -76,11 +59,6 @@ export interface CopilotConfig {
}
}
function validateProviderId(value: string | undefined): ProviderId | null {
if (!value) return null
return VALID_PROVIDER_IDS.includes(value as ProviderId) ? (value as ProviderId) : null
}
function parseFloatEnv(value: string | undefined, name: string): number | null {
if (!value) return null
const parsed = Number.parseFloat(value)
@@ -131,19 +109,6 @@ export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
}
function applyEnvironmentOverrides(config: CopilotConfig): void {
const chatProvider = validateProviderId(process.env.COPILOT_CHAT_PROVIDER)
if (chatProvider) {
config.chat.defaultProvider = chatProvider
} else if (process.env.COPILOT_CHAT_PROVIDER) {
logger.warn(
`Invalid COPILOT_CHAT_PROVIDER: ${process.env.COPILOT_CHAT_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_CHAT_MODEL) {
config.chat.defaultModel = process.env.COPILOT_CHAT_MODEL
}
const chatTemperature = parseFloatEnv(
process.env.COPILOT_CHAT_TEMPERATURE,
'COPILOT_CHAT_TEMPERATURE'
@@ -157,19 +122,6 @@ function applyEnvironmentOverrides(config: CopilotConfig): void {
config.chat.maxTokens = chatMaxTokens
}
const ragProvider = validateProviderId(process.env.COPILOT_RAG_PROVIDER)
if (ragProvider) {
config.rag.defaultProvider = ragProvider
} else if (process.env.COPILOT_RAG_PROVIDER) {
logger.warn(
`Invalid COPILOT_RAG_PROVIDER: ${process.env.COPILOT_RAG_PROVIDER}. Valid providers: ${VALID_PROVIDER_IDS.join(', ')}`
)
}
if (process.env.COPILOT_RAG_MODEL) {
config.rag.defaultModel = process.env.COPILOT_RAG_MODEL
}
const ragTemperature = parseFloatEnv(
process.env.COPILOT_RAG_TEMPERATURE,
'COPILOT_RAG_TEMPERATURE'
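With COPILOT_CHAT_PROVIDER / COPILOT_CHAT_MODEL (and their RAG counterparts) no longer honored, only the numeric overrides remain. A sketch of the surviving behavior; the NaN handling in `parseFloatEnv` is an assumption, since only the function's first lines appear in the hunk:

```ts
// Sketch of the numeric env overrides that remain after this change.
// Provider and model now come from the request; only tuning values are read from env.
function parseFloatEnv(value: string | undefined, name: string): number | null {
  if (!value) return null
  const parsed = Number.parseFloat(value)
  if (Number.isNaN(parsed)) {
    // assumed behavior: warn and fall back to the built-in default
    console.warn(`Invalid ${name}: "${value}" is not a number`)
    return null
  }
  return parsed
}

// COPILOT_CHAT_TEMPERATURE=0.3 -> config.chat.temperature = 0.3
const chatTemperature = parseFloatEnv(
  process.env.COPILOT_CHAT_TEMPERATURE,
  'COPILOT_CHAT_TEMPERATURE'
)
```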

View File

@@ -1,29 +1,3 @@
export const COPILOT_MODEL_IDS = [
'gpt-5-fast',
'gpt-5',
'gpt-5-medium',
'gpt-5-high',
'gpt-5.1-fast',
'gpt-5.1',
'gpt-5.1-medium',
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',
'claude-4-sonnet',
'claude-4.5-haiku',
'claude-4.5-sonnet',
'claude-4.6-opus',
'claude-4.5-opus',
'claude-4.1-opus',
'gemini-3-pro',
] as const
export type CopilotModelId = string
export const COPILOT_MODES = ['ask', 'build', 'plan'] as const
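Dropping the hard-coded `COPILOT_MODEL_IDS` union in favor of `CopilotModelId = string` means the allowed model list is now owned by the backend. A two-line illustration (the IDs are examples):

```ts
// Any backend-reported id now type-checks without a frontend release:
const pinned: CopilotModelId = 'claude-opus-4-6'
const future: CopilotModelId = 'some-model-added-later' // previously a compile error
```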

View File

@@ -1,5 +1,3 @@
import type { CopilotProviderConfig } from '@/lib/copilot/types'
export type SSEEventType =
| 'chat_id'
| 'title_updated'
@@ -104,7 +102,7 @@ export interface OrchestratorRequest {
contexts?: Array<{ type: string; content: string }>
fileAttachments?: FileAttachment[]
commands?: string[]
provider?: CopilotProviderConfig
provider?: string
streamToolCalls?: boolean
version?: string
prefetch?: boolean
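The orchestrator request now carries the provider as a plain string id rather than a `CopilotProviderConfig` object (the union removed in the types change below). A small illustration using `Partial`, since the remaining required fields of `OrchestratorRequest` are not shown in this hunk:

```ts
// Provider is now just a string id; credentials and endpoints stay server-side.
const requestPatch: Partial<OrchestratorRequest> = {
  provider: 'anthropic', // was: { provider: 'anthropic', model: '...', apiKey: '...' }
  streamToolCalls: true,
  contexts: [{ type: 'workflow', content: 'current workflow summary (illustrative)' }],
}
```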

View File

@@ -1,4 +1,3 @@
import type { ProviderId } from '@/providers/types'
import type { CopilotToolCall, ToolState } from '@/stores/panel'
export type NotificationStatus =
@@ -16,33 +15,3 @@ export interface AvailableModel {
friendlyName: string
provider: string
}
// Provider configuration for Sim Agent requests.
// This type is only for the `provider` field in requests sent to the Sim Agent.
export type CopilotProviderConfig =
| {
provider: 'azure-openai'
model: string
apiKey?: string
apiVersion?: string
endpoint?: string
}
| {
provider: 'azure-anthropic'
model: string
apiKey?: string
apiVersion?: string
endpoint?: string
}
| {
provider: 'vertex'
model: string
apiKey?: string
vertexProject?: string
vertexLocation?: string
}
| {
provider: Exclude<ProviderId, 'azure-openai' | 'azure-anthropic' | 'vertex'>
model?: string
apiKey?: string
}

View File

@@ -29,8 +29,6 @@ export const env = createEnv({
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
// Copilot
COPILOT_PROVIDER: z.string().optional(), // Provider for copilot API calls
COPILOT_MODEL: z.string().optional(), // Model for copilot API calls
COPILOT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
AGENT_INDEXER_URL: z.string().url().optional(), // URL for agent training data indexer

View File

@@ -42,7 +42,6 @@ import {
saveMessageCheckpoint,
} from '@/lib/copilot/messages'
import type { CopilotTransportMode } from '@/lib/copilot/models'
import type { AvailableModel } from '@/lib/copilot/types'
import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser'
import {
abortAllInProgressTools,
@@ -52,6 +51,7 @@ import {
stripTodoTags,
} from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { AvailableModel } from '@/lib/copilot/types'
import { getQueryClient } from '@/app/_shell/providers/query-provider'
import { subscriptionKeys } from '@/hooks/queries/subscription'
import type {
@@ -577,7 +577,7 @@ async function finalizeStream(
errorType = 'usage_limit'
} else if (result.status === 403) {
errorContent =
'_Provider config not allowed for non-enterprise users. Please remove the provider config and try again_'
'_Access denied by the Copilot backend. Please verify your API key and server configuration._'
errorType = 'forbidden'
} else if (result.status === 426) {
errorContent =
@@ -2246,7 +2246,7 @@ export const useCopilotStore = create<CopilotStore>()(
const provider = model.provider || 'unknown'
// Use composite provider/modelId keys (matching agent block pattern in providers/models.ts)
// so models with the same raw ID from different providers are uniquely identified.
const compositeId = provider ? `${provider}/${model.id}` : model.id
const compositeId = `${provider}/${model.id}`
return {
id: compositeId,
friendlyName: model.friendlyName || model.id,
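Since `provider` always resolves to at least `'unknown'`, the ternary was redundant and every entry now gets a `provider/modelId` composite key. A sketch of splitting such a key back apart; the helper is illustrative, not part of the store:

```ts
// Illustrative helper for consumers of the composite `provider/modelId` keys.
function splitCompositeModelId(compositeId: string): { provider: string; modelId: string } {
  const slash = compositeId.indexOf('/')
  return slash === -1
    ? { provider: 'unknown', modelId: compositeId }
    : { provider: compositeId.slice(0, slash), modelId: compositeId.slice(slash + 1) }
}

splitCompositeModelId('anthropic/claude-opus-4-6')
// -> { provider: 'anthropic', modelId: 'claude-opus-4-6' }
```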