From f582c78220b9b071564cb218c1acba2513f366de Mon Sep 17 00:00:00 2001 From: Siddharth Ganesan Date: Thu, 5 Feb 2026 16:19:52 -0800 Subject: [PATCH] Refactor --- ...ex-function-inventory-edit-workflow.ts.txt | 35 + ...-inventory-get-blocks-metadata-tool.ts.txt | 21 + ...function-inventory-process-contents.ts.txt | 13 + apps/sim/app/api/copilot/chat/route.ts | 341 +--- apps/sim/app/api/copilot/confirm/route.ts | 5 +- .../copilot-message/copilot-message.tsx | 6 +- apps/sim/hooks/use-undo-redo.ts | 30 +- apps/sim/lib/copilot/api.ts | 9 +- apps/sim/lib/copilot/chat-context.ts | 63 + apps/sim/lib/copilot/chat-lifecycle.ts | 69 + apps/sim/lib/copilot/chat-payload.ts | 252 +++ .../lib/copilot/client-sse/content-blocks.ts | 33 +- apps/sim/lib/copilot/client-sse/handlers.ts | 154 +- .../copilot/client-sse/subagent-handlers.ts | 65 +- apps/sim/lib/copilot/client-sse/types.ts | 28 +- apps/sim/lib/copilot/constants.ts | 104 ++ apps/sim/lib/copilot/messages/checkpoints.ts | 5 +- .../copilot/messages/credential-masking.ts | 19 +- apps/sim/lib/copilot/messages/index.ts | 1 + apps/sim/lib/copilot/messages/persist.ts | 43 + .../sim/lib/copilot/messages/serialization.ts | 35 +- .../lib/copilot/orchestrator/persistence.ts | 3 +- .../orchestrator/sse-handlers/handlers.ts | 58 +- .../sse-handlers/tool-execution.ts | 14 +- .../sim/lib/copilot/orchestrator/sse-utils.ts | 48 +- .../lib/copilot/orchestrator/stream-buffer.ts | 13 +- .../lib/copilot/orchestrator/stream-core.ts | 3 +- apps/sim/lib/copilot/orchestrator/subagent.ts | 15 +- .../tool-executor/deployment-tools/manage.ts | 12 +- .../orchestrator/tool-executor/index.ts | 6 +- .../tool-executor/integration-tools.ts | 6 +- .../tool-executor/workflow-tools/mutations.ts | 29 +- .../tool-executor/workflow-tools/queries.ts | 88 +- apps/sim/lib/copilot/orchestrator/types.ts | 14 +- apps/sim/lib/copilot/store-utils.ts | 43 +- .../server/blocks/get-blocks-metadata-tool.ts | 9 +- .../tools/server/user/get-credentials.ts | 8 +- .../server/workflow/get-workflow-console.ts | 7 +- apps/sim/stores/panel/copilot/store.ts | 1646 +++++++++-------- apps/sim/stores/panel/copilot/types.ts | 35 +- apps/sim/stores/workflow-diff/store.ts | 48 +- apps/sim/stores/workflow-diff/types.ts | 11 +- apps/sim/stores/workflow-diff/utils.ts | 21 +- 43 files changed, 2112 insertions(+), 1356 deletions(-) create mode 100644 apps/sim/.codex-function-inventory-edit-workflow.ts.txt create mode 100644 apps/sim/.codex-function-inventory-get-blocks-metadata-tool.ts.txt create mode 100644 apps/sim/.codex-function-inventory-process-contents.ts.txt create mode 100644 apps/sim/lib/copilot/chat-context.ts create mode 100644 apps/sim/lib/copilot/chat-lifecycle.ts create mode 100644 apps/sim/lib/copilot/chat-payload.ts create mode 100644 apps/sim/lib/copilot/messages/persist.ts diff --git a/apps/sim/.codex-function-inventory-edit-workflow.ts.txt b/apps/sim/.codex-function-inventory-edit-workflow.ts.txt new file mode 100644 index 000000000..e77b30fbe --- /dev/null +++ b/apps/sim/.codex-function-inventory-edit-workflow.ts.txt @@ -0,0 +1,35 @@ +# lib/copilot/tools/server/workflow/edit-workflow.ts + 90-98 ( 9 lines) [function] logSkippedItem + 103-113 ( 11 lines) [function] findBlockWithDuplicateNormalizedName + 127-196 ( 70 lines) [function] validateInputsForBlock + 211-463 ( 253 lines) [function] validateValueForSubBlockType + 481-566 ( 86 lines) [function] topologicalSortInserts + 571-684 ( 114 lines) [function] createBlockFromParams + 686-716 ( 31 lines) [function] updateCanonicalModesForInputs + 721-762 ( 
42 lines) [function] normalizeTools + 786-804 ( 19 lines) [function] normalizeArrayWithIds + 809-811 ( 3 lines) [function] shouldNormalizeArrayIds + 818-859 ( 42 lines) [function] normalizeResponseFormat + 834-847 ( 14 lines) [arrow] sortKeys + 871-945 ( 75 lines) [function] validateSourceHandleForBlock + 956-1051 ( 96 lines) [function] validateConditionHandle +1062-1136 ( 75 lines) [function] validateRouterHandle +1141-1149 ( 9 lines) [function] validateTargetHandle +1155-1261 ( 107 lines) [function] createValidatedEdge +1270-1307 ( 38 lines) [function] addConnectionsAsEdges +1280-1291 ( 12 lines) [arrow] addEdgeForTarget +1309-1339 ( 31 lines) [function] applyTriggerConfigToBlockSubblocks +1353-1361 ( 9 lines) [function] isBlockTypeAllowed +1367-1404 ( 38 lines) [function] filterDisallowedTools +1413-1499 ( 87 lines) [function] normalizeBlockIdsInOperations +1441-1444 ( 4 lines) [arrow] replaceId +1504-2676 (1173 lines) [function] applyOperationsToWorkflowState +1649-1656 ( 8 lines) [arrow] findChildren +2055-2059 ( 5 lines) [arrow] mapConnectionTypeToHandle +2063-2074 ( 12 lines) [arrow] addEdgeForTarget +2682-2777 ( 96 lines) [function] validateWorkflowSelectorIds +2786-3066 ( 281 lines) [function] preValidateCredentialInputs +2820-2845 ( 26 lines) [function] collectCredentialInputs +2850-2870 ( 21 lines) [function] collectHostedApiKeyInput +3068-3117 ( 50 lines) [function] getCurrentWorkflowStateFromDb +3121-3333 ( 213 lines) [method] .execute diff --git a/apps/sim/.codex-function-inventory-get-blocks-metadata-tool.ts.txt b/apps/sim/.codex-function-inventory-get-blocks-metadata-tool.ts.txt new file mode 100644 index 000000000..61d57991b --- /dev/null +++ b/apps/sim/.codex-function-inventory-get-blocks-metadata-tool.ts.txt @@ -0,0 +1,21 @@ +# lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts + 108-306 ( 199 lines) [method] .execute + 309-384 ( 76 lines) [function] transformBlockMetadata + 386-459 ( 74 lines) [function] extractInputs + 461-503 ( 43 lines) [function] extractOperationInputs + 505-518 ( 14 lines) [function] extractOutputs + 520-538 ( 19 lines) [function] formatOutputsFromDefinition + 540-563 ( 24 lines) [function] mapSchemaTypeToSimpleType + 565-591 ( 27 lines) [function] generateInputExample + 593-669 ( 77 lines) [function] processSubBlock + 671-679 ( 9 lines) [function] resolveAuthType + 686-702 ( 17 lines) [function] getStaticModelOptions + 712-754 ( 43 lines) [function] callOptionsWithFallback + 756-806 ( 51 lines) [function] resolveSubblockOptions + 808-820 ( 13 lines) [function] removeNullish + 822-832 ( 11 lines) [function] normalizeCondition + 834-872 ( 39 lines) [function] splitParametersByOperation + 874-905 ( 32 lines) [function] computeBlockLevelInputs + 907-935 ( 29 lines) [function] computeOperationLevelInputs + 937-947 ( 11 lines) [function] resolveOperationIds + 949-961 ( 13 lines) [function] resolveToolIdForOperation diff --git a/apps/sim/.codex-function-inventory-process-contents.ts.txt b/apps/sim/.codex-function-inventory-process-contents.ts.txt new file mode 100644 index 000000000..82e8de18e --- /dev/null +++ b/apps/sim/.codex-function-inventory-process-contents.ts.txt @@ -0,0 +1,13 @@ +# lib/copilot/process-contents.ts + 31-81 ( 51 lines) [function] processContexts + 84-161 ( 78 lines) [function] processContextsServer + 163-208 ( 46 lines) [function] sanitizeMessageForDocs + 210-248 ( 39 lines) [function] processPastChatFromDb + 250-281 ( 32 lines) [function] processWorkflowFromDb + 283-316 ( 34 lines) [function] processPastChat + 319-321 ( 3 
lines) [function] processPastChatViaApi + 323-362 ( 40 lines) [function] processKnowledgeFromDb + 364-439 ( 76 lines) [function] processBlockMetadata + 441-473 ( 33 lines) [function] processTemplateFromDb + 475-498 ( 24 lines) [function] processWorkflowBlockFromDb + 500-555 ( 56 lines) [function] processExecutionLogFromDb diff --git a/apps/sim/app/api/copilot/chat/route.ts b/apps/sim/app/api/copilot/chat/route.ts index c6b1250c5..a048e31d2 100644 --- a/apps/sim/app/api/copilot/chat/route.ts +++ b/apps/sim/app/api/copilot/chat/route.ts @@ -6,8 +6,10 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' import { generateChatTitle } from '@/lib/copilot/chat-title' +import { buildConversationHistory } from '@/lib/copilot/chat-context' +import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle' +import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload' import { getCopilotModel } from '@/lib/copilot/config' -import { SIM_AGENT_VERSION } from '@/lib/copilot/constants' import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models' import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' import { @@ -22,14 +24,8 @@ import { createRequestTracker, createUnauthorizedResponse, } from '@/lib/copilot/request-helpers' -import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials' -import type { CopilotProviderConfig } from '@/lib/copilot/types' import { env } from '@/lib/core/config/env' -import { CopilotFiles } from '@/lib/uploads' -import { createFileContent } from '@/lib/uploads/utils/file-utils' import { resolveWorkflowIdForUser } from '@/lib/workflows/utils' -import { tools } from '@/tools/registry' -import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils' const logger = createLogger('CopilotChatAPI') @@ -178,311 +174,66 @@ export async function POST(req: NextRequest) { let conversationHistory: any[] = [] let actualChatId = chatId - if (chatId) { - // Load existing chat - const [chat] = await db - .select() - .from(copilotChats) - .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, authenticatedUserId))) - .limit(1) - - if (chat) { - currentChat = chat - conversationHistory = Array.isArray(chat.messages) ? 
chat.messages : [] - } - } else if (createNewChat && workflowId) { - // Create new chat - const { provider, model } = getCopilotModel('chat') - const [newChat] = await db - .insert(copilotChats) - .values({ - userId: authenticatedUserId, - workflowId, - title: null, - model, - messages: [], - }) - .returning() - - if (newChat) { - currentChat = newChat - actualChatId = newChat.id - } - } - - // Process file attachments if present - const processedFileContents: any[] = [] - if (fileAttachments && fileAttachments.length > 0) { - const processedAttachments = await CopilotFiles.processCopilotAttachments( - fileAttachments, - tracker.requestId + if (chatId || createNewChat) { + const defaultsForChatRow = getCopilotModel('chat') + const chatResult = await resolveOrCreateChat({ + chatId, + userId: authenticatedUserId, + workflowId, + model: defaultsForChatRow.model, + }) + currentChat = chatResult.chat + actualChatId = chatResult.chatId || chatId + const history = buildConversationHistory( + chatResult.conversationHistory, + (chatResult.chat?.conversationId as string | undefined) || conversationId ) - - for (const { buffer, attachment } of processedAttachments) { - const fileContent = createFileContent(buffer, attachment.media_type) - if (fileContent) { - processedFileContents.push(fileContent) - } - } - } - - // Build messages array for sim agent with conversation history - const messages: any[] = [] - - // Add conversation history (need to rebuild these with file support if they had attachments) - for (const msg of conversationHistory) { - if (msg.fileAttachments && msg.fileAttachments.length > 0) { - // This is a message with file attachments - rebuild with content array - const content: any[] = [{ type: 'text', text: msg.content }] - - const processedHistoricalAttachments = await CopilotFiles.processCopilotAttachments( - msg.fileAttachments, - tracker.requestId - ) - - for (const { buffer, attachment } of processedHistoricalAttachments) { - const fileContent = createFileContent(buffer, attachment.media_type) - if (fileContent) { - content.push(fileContent) - } - } - - messages.push({ - role: msg.role, - content, - }) - } else { - // Regular text-only message - messages.push({ - role: msg.role, - content: msg.content, - }) - } - } - - // Add implicit feedback if provided - if (implicitFeedback) { - messages.push({ - role: 'system', - content: implicitFeedback, - }) - } - - // Add current user message with file attachments - if (processedFileContents.length > 0) { - // Message with files - use content array format - const content: any[] = [{ type: 'text', text: message }] - - // Add file contents - for (const fileContent of processedFileContents) { - content.push(fileContent) - } - - messages.push({ - role: 'user', - content, - }) - } else { - // Text-only message - messages.push({ - role: 'user', - content: message, - }) + conversationHistory = history.history } const defaults = getCopilotModel('chat') const selectedModel = model || defaults.model - const envModel = env.COPILOT_MODEL || defaults.model - - let providerConfig: CopilotProviderConfig | undefined - const providerEnv = env.COPILOT_PROVIDER as any - - if (providerEnv) { - if (providerEnv === 'azure-openai') { - providerConfig = { - provider: 'azure-openai', - model: envModel, - apiKey: env.AZURE_OPENAI_API_KEY, - apiVersion: 'preview', - endpoint: env.AZURE_OPENAI_ENDPOINT, - } - } else if (providerEnv === 'vertex') { - providerConfig = { - provider: 'vertex', - model: envModel, - apiKey: env.COPILOT_API_KEY, - vertexProject: 
env.VERTEX_PROJECT, - vertexLocation: env.VERTEX_LOCATION, - } - } else { - providerConfig = { - provider: providerEnv, - model: selectedModel, - apiKey: env.COPILOT_API_KEY, - } - } - } - const effectiveMode = mode === 'agent' ? 'build' : mode - const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode - - // Determine conversationId to use for this request const effectiveConversationId = (currentChat?.conversationId as string | undefined) || conversationId - // For agent/build mode, fetch credentials and build tool definitions - let integrationTools: any[] = [] - let baseTools: any[] = [] - let credentials: { - oauth: Record< - string, - { accessToken: string; accountId: string; name: string; expiresAt?: string } - > - apiKeys: string[] - metadata?: { - connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> - configuredApiKeys: string[] + const requestPayload = await buildCopilotRequestPayload( + { + message, + workflowId, + userId: authenticatedUserId, + userMessageId: userMessageIdToUse, + mode, + model: selectedModel, + stream, + conversationId: effectiveConversationId, + conversationHistory, + contexts: agentContexts, + fileAttachments, + commands, + chatId: actualChatId, + prefetch, + userName: session?.user?.name || undefined, + implicitFeedback, + }, + { + selectedModel, } - } | null = null - - if (effectiveMode === 'build') { - // Build base tools (executed locally, not deferred) - // Include function_execute for code execution capability - baseTools = [ - { - name: 'function_execute', - description: - 'Execute JavaScript code to perform calculations, data transformations, API calls, or any programmatic task. Code runs in a secure sandbox with fetch() available. Write plain statements (not wrapped in functions). Example: const res = await fetch(url); const data = await res.json(); return data;', - input_schema: { - type: 'object', - properties: { - code: { - type: 'string', - description: - 'Raw JavaScript statements to execute. Code is auto-wrapped in async context. Use fetch() for HTTP requests. Write like: const res = await fetch(url); return await res.json();', - }, - }, - required: ['code'], - }, - executeLocally: true, - }, - ] - // Fetch user credentials (OAuth + API keys) - pass workflowId to get workspace env vars - try { - const rawCredentials = await getCredentialsServerTool.execute( - { workflowId }, - { userId: authenticatedUserId } - ) - - // Transform OAuth credentials to map format: { [provider]: { accessToken, accountId, ... } } - const oauthMap: Record< - string, - { accessToken: string; accountId: string; name: string; expiresAt?: string } - > = {} - const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = [] - for (const cred of rawCredentials?.oauth?.connected?.credentials || []) { - if (cred.accessToken) { - oauthMap[cred.provider] = { - accessToken: cred.accessToken, - accountId: cred.id, - name: cred.name, - } - connectedOAuth.push({ - provider: cred.provider, - name: cred.name, - }) - } - } - - credentials = { - oauth: oauthMap, - apiKeys: rawCredentials?.environment?.variableNames || [], - metadata: { - connectedOAuth, - configuredApiKeys: rawCredentials?.environment?.variableNames || [], - }, - } - - logger.info(`[${tracker.requestId}] Fetched credentials for build mode`, { - oauthProviders: Object.keys(oauthMap), - apiKeyCount: credentials.apiKeys.length, - }) - } catch (error) { - logger.warn(`[${tracker.requestId}] Failed to fetch credentials`, { - error: error instanceof Error ? 
error.message : String(error), - }) - } - - // Build tool definitions (schemas only) - try { - const { createUserToolSchema } = await import('@/tools/params') - - const latestTools = getLatestVersionTools(tools) - - integrationTools = Object.entries(latestTools).map(([toolId, toolConfig]) => { - const userSchema = createUserToolSchema(toolConfig) - const strippedName = stripVersionSuffix(toolId) - return { - name: strippedName, - description: toolConfig.description || toolConfig.name || strippedName, - input_schema: userSchema, - defer_loading: true, // Anthropic Advanced Tool Use - ...(toolConfig.oauth?.required && { - oauth: { - required: true, - provider: toolConfig.oauth.provider, - }, - }), - } - }) - - logger.info(`[${tracker.requestId}] Built tool definitions for build mode`, { - integrationToolCount: integrationTools.length, - }) - } catch (error) { - logger.warn(`[${tracker.requestId}] Failed to build tool definitions`, { - error: error instanceof Error ? error.message : String(error), - }) - } - } - - const requestPayload = { - message: message, // Just send the current user message text - workflowId, - userId: authenticatedUserId, - stream: stream, - streamToolCalls: true, - model: selectedModel, - mode: transportMode, - messageId: userMessageIdToUse, - version: SIM_AGENT_VERSION, - ...(providerConfig ? { provider: providerConfig } : {}), - ...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}), - ...(typeof prefetch === 'boolean' ? { prefetch: prefetch } : {}), - ...(session?.user?.name && { userName: session.user.name }), - ...(agentContexts.length > 0 && { context: agentContexts }), - ...(actualChatId ? { chatId: actualChatId } : {}), - ...(processedFileContents.length > 0 && { fileAttachments: processedFileContents }), - // For build/agent mode, include tools and credentials - ...(integrationTools.length > 0 && { tools: integrationTools }), - ...(baseTools.length > 0 && { baseTools }), - ...(credentials && { credentials }), - ...(commands && commands.length > 0 && { commands }), - } + ) try { logger.info(`[${tracker.requestId}] About to call Sim Agent`, { hasContext: agentContexts.length > 0, contextCount: agentContexts.length, hasConversationId: !!effectiveConversationId, - hasFileAttachments: processedFileContents.length > 0, + hasFileAttachments: Array.isArray(requestPayload.fileAttachments), messageLength: message.length, mode: effectiveMode, - hasTools: integrationTools.length > 0, - toolCount: integrationTools.length, - hasBaseTools: baseTools.length > 0, - baseToolCount: baseTools.length, - hasCredentials: !!credentials, + hasTools: Array.isArray(requestPayload.tools), + toolCount: Array.isArray(requestPayload.tools) ? requestPayload.tools.length : 0, + hasBaseTools: Array.isArray(requestPayload.baseTools), + baseToolCount: Array.isArray(requestPayload.baseTools) ? 
requestPayload.baseTools.length : 0, + hasCredentials: !!requestPayload.credentials, }) } catch {} @@ -623,7 +374,7 @@ export async function POST(req: NextRequest) { content: nonStreamingResult.content, toolCalls: nonStreamingResult.toolCalls, model: selectedModel, - provider: providerConfig?.provider || env.COPILOT_PROVIDER || 'openai', + provider: (requestPayload?.provider as Record)?.provider || env.COPILOT_PROVIDER || 'openai', } logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, { diff --git a/apps/sim/app/api/copilot/confirm/route.ts b/apps/sim/app/api/copilot/confirm/route.ts index 01b6672a3..eb63b7524 100644 --- a/apps/sim/app/api/copilot/confirm/route.ts +++ b/apps/sim/app/api/copilot/confirm/route.ts @@ -1,6 +1,7 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' +import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants' import { authenticateCopilotRequestSessionOnly, createBadRequestResponse, @@ -38,13 +39,13 @@ async function updateToolCallStatus( } try { - const key = `tool_call:${toolCallId}` + const key = `${REDIS_TOOL_CALL_PREFIX}${toolCallId}` const payload = { status, message: message || null, timestamp: new Date().toISOString(), } - await redis.set(key, JSON.stringify(payload), 'EX', 86400) + await redis.set(key, JSON.stringify(payload), 'EX', REDIS_TOOL_CALL_TTL_SECONDS) return true } catch (error) { logger.error('Failed to update tool call status', { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/copilot-message.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/copilot-message.tsx index 1e745f3f2..187ff1594 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/copilot-message.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/copilot-message.tsx @@ -211,7 +211,7 @@ const CopilotMessage: FC = memo( if (block.type === 'text') { const isLastTextBlock = index === message.contentBlocks!.length - 1 && block.type === 'text' - const parsed = parseSpecialTags(block.content) + const parsed = parseSpecialTags(block.content ?? '') // Mask credential IDs in the displayed content const cleanBlockContent = maskCredentialValue( parsed.cleanContent.replace(/\n{3,}/g, '\n\n') @@ -243,7 +243,7 @@ const CopilotMessage: FC = memo( return (
= memo(
) } - if (block.type === 'tool_call') { + if (block.type === 'tool_call' && block.toolCall) { const blockKey = `tool-${block.toolCall.id}` return ( diff --git a/apps/sim/hooks/use-undo-redo.ts b/apps/sim/hooks/use-undo-redo.ts index 252f0785a..10873859e 100644 --- a/apps/sim/hooks/use-undo-redo.ts +++ b/apps/sim/hooks/use-undo-redo.ts @@ -1,5 +1,11 @@ import { useCallback } from 'react' import { createLogger } from '@sim/logger' + +declare global { + interface Window { + __skipDiffRecording?: boolean + } +} import type { Edge } from 'reactflow' import { useSession } from '@/lib/auth/auth-client' import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations' @@ -908,7 +914,7 @@ export function useUndoRedo() { // Set flag to skip recording during this operation - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Restore baseline state and broadcast to everyone if (baselineSnapshot && activeWorkflowId) { @@ -945,7 +951,7 @@ export function useUndoRedo() { logger.info('Clearing diff UI state') useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) } finally { - ;(window as any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Undid apply-diff operation successfully') @@ -965,7 +971,7 @@ export function useUndoRedo() { // Set flag to skip recording during this operation - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Apply the before-accept state (with markers for this user) useWorkflowStore.getState().replaceWorkflowState(beforeAccept) @@ -1004,7 +1010,7 @@ export function useUndoRedo() { diffAnalysis: diffAnalysis, }) } finally { - ;(window as any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Undid accept-diff operation - restored diff view') @@ -1018,7 +1024,7 @@ export function useUndoRedo() { const { useWorkflowStore } = await import('@/stores/workflows/workflow/store') const { useSubBlockStore } = await import('@/stores/workflows/subblock/store') - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Apply the before-reject state (with markers for this user) useWorkflowStore.getState().replaceWorkflowState(beforeReject) @@ -1055,7 +1061,7 @@ export function useUndoRedo() { diffAnalysis: diffAnalysis, }) } finally { - ;(window as any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Undid reject-diff operation - restored diff view') @@ -1526,7 +1532,7 @@ export function useUndoRedo() { // Set flag to skip recording during this operation - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline) const diffStore = useWorkflowDiffStore.getState() @@ -1567,7 +1573,7 @@ export function useUndoRedo() { diffAnalysis: diffAnalysis, }) } finally { - ;(window as any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Redid apply-diff operation') @@ -1583,7 +1589,7 @@ export function useUndoRedo() { // Set flag to skip recording during this operation - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Clear diff state FIRST to prevent flash of colors (local UI only) // Use setState directly to ensure synchronous clearing @@ -1621,7 +1627,7 @@ export function useUndoRedo() { operationId: opId, }) } finally { - ;(window as 
any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Redid accept-diff operation - cleared diff view') @@ -1635,7 +1641,7 @@ export function useUndoRedo() { const { useWorkflowStore } = await import('@/stores/workflows/workflow/store') const { useSubBlockStore } = await import('@/stores/workflows/subblock/store') - ;(window as any).__skipDiffRecording = true + ;window.__skipDiffRecording = true try { // Clear diff state FIRST to prevent flash of colors (local UI only) // Use setState directly to ensure synchronous clearing @@ -1673,7 +1679,7 @@ export function useUndoRedo() { operationId: opId, }) } finally { - ;(window as any).__skipDiffRecording = false + ;window.__skipDiffRecording = false } logger.info('Redid reject-diff operation - cleared diff view') diff --git a/apps/sim/lib/copilot/api.ts b/apps/sim/lib/copilot/api.ts index 089d6bac7..19d0f6f7b 100644 --- a/apps/sim/lib/copilot/api.ts +++ b/apps/sim/lib/copilot/api.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { COPILOT_CHAT_API_PATH, COPILOT_CHAT_STREAM_API_PATH } from '@/lib/copilot/constants' import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models' const logger = createLogger('CopilotAPI') @@ -139,7 +140,9 @@ export async function sendStreamingMessage( contextsPreview: preview, resumeFromEventId, }) - } catch {} + } catch (error) { + logger.warn('Failed to log streaming message context preview', { error: error instanceof Error ? error.message : String(error) }) + } const streamId = request.userMessageId if (typeof resumeFromEventId === 'number') { @@ -150,7 +153,7 @@ export async function sendStreamingMessage( status: 400, } } - const url = `/api/copilot/chat/stream?streamId=${encodeURIComponent( + const url = `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent( streamId )}&from=${encodeURIComponent(String(resumeFromEventId))}` const response = await fetch(url, { @@ -182,7 +185,7 @@ export async function sendStreamingMessage( } } - const response = await fetch('/api/copilot/chat', { + const response = await fetch(COPILOT_CHAT_API_PATH, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ ...requestBody, stream: true }), diff --git a/apps/sim/lib/copilot/chat-context.ts b/apps/sim/lib/copilot/chat-context.ts new file mode 100644 index 000000000..d1377eb4a --- /dev/null +++ b/apps/sim/lib/copilot/chat-context.ts @@ -0,0 +1,63 @@ +import { createLogger } from '@sim/logger' +import { CopilotFiles } from '@/lib/uploads' +import { createFileContent } from '@/lib/uploads/utils/file-utils' + +const logger = createLogger('CopilotChatContext') + +/** + * Build conversation history from stored chat messages. + */ +export function buildConversationHistory( + messages: unknown[], + conversationId?: string +): { history: unknown[]; conversationId?: string } { + const history = Array.isArray(messages) ? messages : [] + return { + history, + ...(conversationId ? { conversationId } : {}), + } +} + +export interface FileAttachmentInput { + id: string + key: string + name?: string + filename?: string + mimeType?: string + media_type?: string + size: number +} + +export interface FileContent { + type: string + [key: string]: unknown +} + +/** + * Process file attachments into content for the payload. 
+ */
+export async function processFileAttachments(
+  fileAttachments: FileAttachmentInput[],
+  userId: string
+): Promise<FileContent[]> {
+  if (!Array.isArray(fileAttachments) || fileAttachments.length === 0) return []
+
+  const processedFileContents: FileContent[] = []
+  const requestId = `copilot-${userId}-${Date.now()}`
+  const processedAttachments = await CopilotFiles.processCopilotAttachments(fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0], requestId)
+
+  for (const { buffer, attachment } of processedAttachments) {
+    const fileContent = createFileContent(buffer, attachment.media_type)
+    if (fileContent) {
+      processedFileContents.push(fileContent as FileContent)
+    }
+  }
+
+  logger.debug('Processed file attachments for payload', {
+    userId,
+    inputCount: fileAttachments.length,
+    outputCount: processedFileContents.length,
+  })
+
+  return processedFileContents
+}
diff --git a/apps/sim/lib/copilot/chat-lifecycle.ts b/apps/sim/lib/copilot/chat-lifecycle.ts
new file mode 100644
index 000000000..5d25eee24
--- /dev/null
+++ b/apps/sim/lib/copilot/chat-lifecycle.ts
@@ -0,0 +1,69 @@
+import { db } from '@sim/db'
+import { copilotChats } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { and, eq } from 'drizzle-orm'
+
+const logger = createLogger('CopilotChatLifecycle')
+
+export interface ChatLoadResult {
+  chatId: string
+  chat: typeof copilotChats.$inferSelect | null
+  conversationHistory: unknown[]
+  isNew: boolean
+}
+
+/**
+ * Resolve or create a copilot chat session.
+ * If chatId is provided, loads the existing chat. Otherwise creates a new one.
+ */
+export async function resolveOrCreateChat(params: {
+  chatId?: string
+  userId: string
+  workflowId: string
+  model: string
+}): Promise<ChatLoadResult> {
+  const { chatId, userId, workflowId, model } = params
+
+  if (chatId) {
+    const [chat] = await db
+      .select()
+      .from(copilotChats)
+      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
+      .limit(1)
+
+    return {
+      chatId,
+      chat: chat ?? null,
+      conversationHistory: chat && Array.isArray(chat.messages) ?
chat.messages : [], + isNew: false, + } + } + + const [newChat] = await db + .insert(copilotChats) + .values({ + userId, + workflowId, + title: null, + model, + messages: [], + }) + .returning() + + if (!newChat) { + logger.warn('Failed to create new copilot chat row', { userId, workflowId }) + return { + chatId: '', + chat: null, + conversationHistory: [], + isNew: true, + } + } + + return { + chatId: newChat.id, + chat: newChat, + conversationHistory: [], + isNew: true, + } +} diff --git a/apps/sim/lib/copilot/chat-payload.ts b/apps/sim/lib/copilot/chat-payload.ts new file mode 100644 index 000000000..7883f4234 --- /dev/null +++ b/apps/sim/lib/copilot/chat-payload.ts @@ -0,0 +1,252 @@ +import { createLogger } from '@sim/logger' +import { env } from '@/lib/core/config/env' +import { getCopilotModel } from '@/lib/copilot/config' +import { SIM_AGENT_VERSION } from '@/lib/copilot/constants' +import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials' +import type { CopilotProviderConfig } from '@/lib/copilot/types' +import { tools } from '@/tools/registry' +import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils' +import { type FileContent, processFileAttachments } from '@/lib/copilot/chat-context' + +const logger = createLogger('CopilotChatPayload') + +export interface BuildPayloadParams { + message: string + workflowId: string + userId: string + userMessageId: string + mode: string + model: string + stream: boolean + conversationId?: string + conversationHistory?: unknown[] + contexts?: Array<{ type: string; content: string }> + fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }> + commands?: string[] + chatId?: string + prefetch?: boolean + userName?: string + implicitFeedback?: string +} + +interface ToolSchema { + name: string + description: string + input_schema: Record + defer_loading?: boolean + executeLocally?: boolean + oauth?: { required: boolean; provider: string } +} + +interface CredentialsPayload { + oauth: Record + apiKeys: string[] + metadata?: { + connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> + configuredApiKeys: string[] + } +} + +type MessageContent = string | Array<{ type: string; text?: string; [key: string]: unknown }> + +interface ConversationMessage { + role: string + content: MessageContent +} + +function buildProviderConfig(selectedModel: string): CopilotProviderConfig | undefined { + const defaults = getCopilotModel('chat') + const envModel = env.COPILOT_MODEL || defaults.model + const providerEnv = env.COPILOT_PROVIDER + + if (!providerEnv) return undefined + + if (providerEnv === 'azure-openai') { + return { + provider: 'azure-openai', + model: envModel, + apiKey: env.AZURE_OPENAI_API_KEY, + apiVersion: 'preview', + endpoint: env.AZURE_OPENAI_ENDPOINT, + } + } + + if (providerEnv === 'vertex') { + return { + provider: 'vertex', + model: envModel, + apiKey: env.COPILOT_API_KEY, + vertexProject: env.VERTEX_PROJECT, + vertexLocation: env.VERTEX_LOCATION, + } + } + + return { + provider: providerEnv as Exclude, + model: selectedModel, + apiKey: env.COPILOT_API_KEY, + } as CopilotProviderConfig +} + +/** + * Build the request payload for the copilot backend. 
+ */ +export async function buildCopilotRequestPayload( + params: BuildPayloadParams, + options: { + providerConfig?: CopilotProviderConfig + selectedModel: string + } +): Promise> { + const { + message, workflowId, userId, userMessageId, mode, stream, + conversationId, conversationHistory = [], contexts, fileAttachments, + commands, chatId, prefetch, userName, implicitFeedback, + } = params + + const selectedModel = options.selectedModel + const providerConfig = options.providerConfig ?? buildProviderConfig(selectedModel) + + const effectiveMode = mode === 'agent' ? 'build' : mode + const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode + + const processedFileContents = await processFileAttachments(fileAttachments ?? [], userId) + + const messages: ConversationMessage[] = [] + for (const msg of conversationHistory as Array>) { + const msgAttachments = msg.fileAttachments as Array> | undefined + if (Array.isArray(msgAttachments) && msgAttachments.length > 0) { + const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [ + { type: 'text', text: msg.content as string }, + ] + const processedHistoricalAttachments = await processFileAttachments(msgAttachments as BuildPayloadParams['fileAttachments'] ?? [], userId) + for (const fileContent of processedHistoricalAttachments) { + content.push(fileContent) + } + messages.push({ role: msg.role as string, content }) + } else { + messages.push({ role: msg.role as string, content: msg.content as string }) + } + } + + if (implicitFeedback) { + messages.push({ role: 'system', content: implicitFeedback }) + } + + if (processedFileContents.length > 0) { + const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [ + { type: 'text', text: message }, + ] + for (const fileContent of processedFileContents) { + content.push(fileContent) + } + messages.push({ role: 'user', content }) + } else { + messages.push({ role: 'user', content: message }) + } + + let integrationTools: ToolSchema[] = [] + let baseTools: ToolSchema[] = [] + let credentials: CredentialsPayload | null = null + + if (effectiveMode === 'build') { + baseTools = [ + { + name: 'function_execute', + description: + 'Execute JavaScript code to perform calculations, data transformations, API calls, or any programmatic task. Code runs in a secure sandbox with fetch() available. Write plain statements (not wrapped in functions). Example: const res = await fetch(url); const data = await res.json(); return data;', + input_schema: { + type: 'object', + properties: { + code: { + type: 'string', + description: + 'Raw JavaScript statements to execute. Code is auto-wrapped in async context. Use fetch() for HTTP requests. Write like: const res = await fetch(url); return await res.json();', + }, + }, + required: ['code'], + }, + executeLocally: true, + }, + ] + + try { + const rawCredentials = await getCredentialsServerTool.execute({ workflowId }, { userId }) + + const oauthMap: CredentialsPayload['oauth'] = {} + const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = [] + for (const cred of rawCredentials?.oauth?.connected?.credentials ?? []) { + if (cred.accessToken) { + oauthMap[cred.provider] = { + accessToken: cred.accessToken, + accountId: cred.id, + name: cred.name, + } + connectedOAuth.push({ provider: cred.provider, name: cred.name }) + } + } + + credentials = { + oauth: oauthMap, + apiKeys: rawCredentials?.environment?.variableNames ?? 
[], + metadata: { + connectedOAuth, + configuredApiKeys: rawCredentials?.environment?.variableNames ?? [], + }, + } + } catch (error) { + logger.warn('Failed to fetch credentials for build payload', { + error: error instanceof Error ? error.message : String(error), + }) + } + + try { + const { createUserToolSchema } = await import('@/tools/params') + const latestTools = getLatestVersionTools(tools) + + integrationTools = Object.entries(latestTools).map(([toolId, toolConfig]) => { + const userSchema = createUserToolSchema(toolConfig) + const strippedName = stripVersionSuffix(toolId) + return { + name: strippedName, + description: toolConfig.description || toolConfig.name || strippedName, + input_schema: userSchema as unknown as Record, + defer_loading: true, + ...(toolConfig.oauth?.required && { + oauth: { + required: true, + provider: toolConfig.oauth.provider, + }, + }), + } + }) + } catch (error) { + logger.warn('Failed to build tool schemas for payload', { + error: error instanceof Error ? error.message : String(error), + }) + } + } + + return { + message, + workflowId, + userId, + stream, + streamToolCalls: true, + model: selectedModel, + mode: transportMode, + messageId: userMessageId, + version: SIM_AGENT_VERSION, + ...(providerConfig ? { provider: providerConfig } : {}), + ...(conversationId ? { conversationId } : {}), + ...(typeof prefetch === 'boolean' ? { prefetch } : {}), + ...(userName ? { userName } : {}), + ...(contexts && contexts.length > 0 ? { context: contexts } : {}), + ...(chatId ? { chatId } : {}), + ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}), + ...(integrationTools.length > 0 ? { tools: integrationTools } : {}), + ...(baseTools.length > 0 ? { baseTools } : {}), + ...(credentials ? { credentials } : {}), + ...(commands && commands.length > 0 ? 
{ commands } : {}), + } +} diff --git a/apps/sim/lib/copilot/client-sse/content-blocks.ts b/apps/sim/lib/copilot/client-sse/content-blocks.ts index c2ee72458..1ce416bc6 100644 --- a/apps/sim/lib/copilot/client-sse/content-blocks.ts +++ b/apps/sim/lib/copilot/client-sse/content-blocks.ts @@ -3,7 +3,7 @@ import type { CopilotMessage, MessageFileAttachment, } from '@/stores/panel/copilot/types' -import type { StreamingContext } from './types' +import type { ClientContentBlock, ClientStreamingContext } from './types' const TEXT_BLOCK_TYPE = 'text' const THINKING_BLOCK_TYPE = 'thinking' @@ -25,8 +25,8 @@ export function createUserMessage( ...(contexts && contexts.length > 0 && { contentBlocks: [ - { type: 'contexts', contexts: contexts as any, timestamp: Date.now() }, - ] as any, + { type: 'contexts', contexts, timestamp: Date.now() }, + ], }), } } @@ -61,7 +61,7 @@ export function createErrorMessage( } } -export function appendTextBlock(context: StreamingContext, text: string) { +export function appendTextBlock(context: ClientStreamingContext, text: string) { if (!text) return context.accumulatedContent += text if (context.currentTextBlock && context.contentBlocks.length > 0) { @@ -71,11 +71,9 @@ export function appendTextBlock(context: StreamingContext, text: string) { return } } - context.currentTextBlock = { type: '', content: '', timestamp: 0, toolCall: null } - context.currentTextBlock.type = TEXT_BLOCK_TYPE - context.currentTextBlock.content = text - context.currentTextBlock.timestamp = Date.now() - context.contentBlocks.push(context.currentTextBlock) + const newBlock: ClientContentBlock = { type: 'text', content: text, timestamp: Date.now() } + context.currentTextBlock = newBlock + context.contentBlocks.push(newBlock) } export function appendContinueOption(content: string): string { @@ -84,7 +82,7 @@ export function appendContinueOption(content: string): string { return `${content}${suffix}${CONTINUE_OPTIONS_TAG}` } -export function appendContinueOptionBlock(blocks: any[]): any[] { +export function appendContinueOptionBlock(blocks: ClientContentBlock[]): ClientContentBlock[] { if (!Array.isArray(blocks)) return blocks const hasOptions = blocks.some( (block) => @@ -109,7 +107,7 @@ export function stripContinueOption(content: string): string { return next.replace(/\n{2,}\s*$/g, '\n').trimEnd() } -export function stripContinueOptionFromBlocks(blocks: any[]): any[] { +export function stripContinueOptionFromBlocks(blocks: ClientContentBlock[]): ClientContentBlock[] { if (!Array.isArray(blocks)) return blocks return blocks.flatMap((block) => { if ( @@ -125,20 +123,17 @@ export function stripContinueOptionFromBlocks(blocks: any[]): any[] { }) } -export function beginThinkingBlock(context: StreamingContext) { +export function beginThinkingBlock(context: ClientStreamingContext) { if (!context.currentThinkingBlock) { - context.currentThinkingBlock = { type: '', content: '', timestamp: 0, toolCall: null } - context.currentThinkingBlock.type = THINKING_BLOCK_TYPE - context.currentThinkingBlock.content = '' - context.currentThinkingBlock.timestamp = Date.now() - ;(context.currentThinkingBlock as any).startTime = Date.now() - context.contentBlocks.push(context.currentThinkingBlock) + const newBlock: ClientContentBlock = { type: 'thinking', content: '', timestamp: Date.now(), startTime: Date.now() } + context.currentThinkingBlock = newBlock + context.contentBlocks.push(newBlock) } context.isInThinkingBlock = true context.currentTextBlock = null } -export function finalizeThinkingBlock(context: 
StreamingContext) { +export function finalizeThinkingBlock(context: ClientStreamingContext) { if (context.currentThinkingBlock) { context.currentThinkingBlock.duration = Date.now() - (context.currentThinkingBlock.startTime || Date.now()) diff --git a/apps/sim/lib/copilot/client-sse/handlers.ts b/apps/sim/lib/copilot/client-sse/handlers.ts index 169917578..484543163 100644 --- a/apps/sim/lib/copilot/client-sse/handlers.ts +++ b/apps/sim/lib/copilot/client-sse/handlers.ts @@ -1,28 +1,30 @@ import { createLogger } from '@sim/logger' +import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants' +import type { SSEEvent } from '@/lib/copilot/orchestrator/types' +import { asRecord } from '@/lib/copilot/orchestrator/sse-utils' import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry' -import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' -import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types' -import { - appendTextBlock, - beginThinkingBlock, - finalizeThinkingBlock, -} from './content-blocks' -import type { StreamingContext } from './types' import { isBackgroundState, isRejectedState, isReviewState, resolveToolDisplay, } from '@/lib/copilot/store-utils' +import { useWorkflowDiffStore } from '@/stores/workflow-diff/store' +import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types' +import { + appendTextBlock, + beginThinkingBlock, + finalizeThinkingBlock, +} from './content-blocks' +import type { ClientContentBlock, ClientStreamingContext } from './types' const logger = createLogger('CopilotClientSseHandlers') -const STREAM_STORAGE_KEY = 'copilot_active_stream' const TEXT_BLOCK_TYPE = 'text' const MAX_BATCH_INTERVAL = 50 const MIN_BATCH_INTERVAL = 16 const MAX_QUEUE_SIZE = 5 -function writeActiveStreamToStorage(info: any): void { +function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void { if (typeof window === 'undefined') return try { if (!info) { @@ -30,17 +32,25 @@ function writeActiveStreamToStorage(info: any): void { return } window.sessionStorage.setItem(STREAM_STORAGE_KEY, JSON.stringify(info)) - } catch {} + } catch (error) { + logger.warn('Failed to write active stream to storage', { + error: error instanceof Error ? 
error.message : String(error), + }) + } } +type StoreSet = ( + partial: Partial | ((state: CopilotStore) => Partial) +) => void + export type SSEHandler = ( - data: any, - context: StreamingContext, + data: SSEEvent, + context: ClientStreamingContext, get: () => CopilotStore, - set: any + set: StoreSet ) => Promise | void -const streamingUpdateQueue = new Map() +const streamingUpdateQueue = new Map() let streamingUpdateRAF: number | null = null let lastBatchTime = 0 @@ -52,8 +62,8 @@ export function stopStreamingUpdates() { streamingUpdateQueue.clear() } -function createOptimizedContentBlocks(contentBlocks: any[]): any[] { - const result: any[] = new Array(contentBlocks.length) +function createOptimizedContentBlocks(contentBlocks: ClientContentBlock[]): ClientContentBlock[] { + const result: ClientContentBlock[] = new Array(contentBlocks.length) for (let i = 0; i < contentBlocks.length; i++) { const block = contentBlocks[i] result[i] = { ...block } @@ -61,7 +71,7 @@ function createOptimizedContentBlocks(contentBlocks: any[]): any[] { return result } -export function flushStreamingUpdates(set: any) { +export function flushStreamingUpdates(set: StoreSet) { if (streamingUpdateRAF !== null) { cancelAnimationFrame(streamingUpdateRAF) streamingUpdateRAF = null @@ -90,7 +100,7 @@ export function flushStreamingUpdates(set: any) { }) } -export function updateStreamingMessage(set: any, context: StreamingContext) { +export function updateStreamingMessage(set: StoreSet, context: ClientStreamingContext) { if (context.suppressStreamingUpdates) return const now = performance.now() streamingUpdateQueue.set(context.messageId, context) @@ -146,10 +156,10 @@ export function updateStreamingMessage(set: any, context: StreamingContext) { } } -export function upsertToolCallBlock(context: StreamingContext, toolCall: CopilotToolCall) { +export function upsertToolCallBlock(context: ClientStreamingContext, toolCall: CopilotToolCall) { let found = false for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] as any + const b = context.contentBlocks[i] if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) { context.contentBlocks[i] = { ...b, toolCall } found = true @@ -165,19 +175,16 @@ function stripThinkingTags(text: string): string { return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '') } -function appendThinkingContent(context: StreamingContext, text: string) { +function appendThinkingContent(context: ClientStreamingContext, text: string) { if (!text) return const cleanedText = stripThinkingTags(text) if (!cleanedText) return if (context.currentThinkingBlock) { context.currentThinkingBlock.content += cleanedText } else { - context.currentThinkingBlock = { type: '', content: '', timestamp: 0, toolCall: null } - context.currentThinkingBlock.type = 'thinking' - context.currentThinkingBlock.content = cleanedText - context.currentThinkingBlock.timestamp = Date.now() - context.currentThinkingBlock.startTime = Date.now() - context.contentBlocks.push(context.currentThinkingBlock) + const newBlock: ClientContentBlock = { type: 'thinking', content: cleanedText, timestamp: Date.now(), startTime: Date.now() } + context.currentThinkingBlock = newBlock + context.contentBlocks.push(newBlock) } context.isInThinkingBlock = true context.currentTextBlock = null @@ -209,10 +216,12 @@ export const sseHandlers: Record = { }, tool_result: (data, context, get, set) => { try { - const toolCallId: string | undefined = data?.toolCallId || data?.data?.id + const 
eventData = asRecord(data?.data) + const toolCallId: string | undefined = data?.toolCallId || (eventData.id as string | undefined) const success: boolean | undefined = data?.success const failedDependency: boolean = data?.failedDependency === true - const skipped: boolean = data?.result?.skipped === true + const resultObj = asRecord(data?.result) + const skipped: boolean = resultObj.skipped === true if (!toolCallId) return const { toolCallsById } = get() const current = toolCallsById[toolCallId] @@ -233,24 +242,24 @@ export const sseHandlers: Record = { updatedMap[toolCallId] = { ...current, state: targetState, - display: resolveToolDisplay( - current.name, - targetState, - current.id, - (current as any).params - ), + display: resolveToolDisplay(current.name, targetState, current.id, current.params), } set({ toolCallsById: updatedMap }) if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') { try { - const result = (data?.result || data?.data?.result) ?? {} - const input = ((current as any).params || (current as any).input) ?? {} - const todoId = input.id || input.todoId || result.id || result.todoId + const result = asRecord(data?.result) || asRecord(eventData.result) + const input = asRecord(current.params || current.input) + const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined if (todoId) { get().updatePlanTodoStatus(todoId, 'completed') } - } catch {} + } catch (error) { + logger.warn('Failed to process checkoff_todo tool result', { + error: error instanceof Error ? error.message : String(error), + toolCallId, + }) + } } if ( @@ -258,28 +267,35 @@ export const sseHandlers: Record = { current.name === 'mark_todo_in_progress' ) { try { - const result = (data?.result || data?.data?.result) ?? {} - const input = ((current as any).params || (current as any).input) ?? {} - const todoId = input.id || input.todoId || result.id || result.todoId + const result = asRecord(data?.result) || asRecord(eventData.result) + const input = asRecord(current.params || current.input) + const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined if (todoId) { get().updatePlanTodoStatus(todoId, 'executing') } - } catch {} + } catch (error) { + logger.warn('Failed to process mark_todo_in_progress tool result', { + error: error instanceof Error ? error.message : String(error), + toolCallId, + }) + } } if (current.name === 'edit_workflow') { try { - const resultPayload = - (data?.result || data?.data?.result || data?.data?.data || data?.data) ?? {} - const workflowState = resultPayload?.workflowState + const resultPayload = asRecord( + data?.result || eventData.result || eventData.data || data?.data + ) + const workflowState = asRecord(resultPayload?.workflowState) + const hasWorkflowState = !!resultPayload?.workflowState logger.info('[SSE] edit_workflow result received', { - hasWorkflowState: !!workflowState, - blockCount: workflowState ? Object.keys(workflowState.blocks ?? {}).length : 0, - edgeCount: workflowState?.edges?.length ?? 0, + hasWorkflowState, + blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0, + edgeCount: Array.isArray(workflowState.edges) ? 
workflowState.edges.length : 0, }) - if (workflowState) { + if (hasWorkflowState) { const diffStore = useWorkflowDiffStore.getState() - diffStore.setProposedChanges(workflowState).catch((err) => { + diffStore.setProposedChanges(resultPayload.workflowState).catch((err) => { logger.error('[SSE] Failed to apply edit_workflow diff', { error: err instanceof Error ? err.message : String(err), }) @@ -294,7 +310,7 @@ export const sseHandlers: Record = { } for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] as any + const b = context.contentBlocks[i] if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { if ( isRejectedState(b.toolCall?.state) || @@ -324,11 +340,16 @@ export const sseHandlers: Record = { } } updateStreamingMessage(set, context) - } catch {} + } catch (error) { + logger.warn('Failed to process tool_result SSE event', { + error: error instanceof Error ? error.message : String(error), + }) + } }, tool_error: (data, context, get, set) => { try { - const toolCallId: string | undefined = data?.toolCallId || data?.data?.id + const errorData = asRecord(data?.data) + const toolCallId: string | undefined = data?.toolCallId || (errorData.id as string | undefined) const failedDependency: boolean = data?.failedDependency === true if (!toolCallId) return const { toolCallsById } = get() @@ -348,17 +369,12 @@ export const sseHandlers: Record = { updatedMap[toolCallId] = { ...current, state: targetState, - display: resolveToolDisplay( - current.name, - targetState, - current.id, - (current as any).params - ), + display: resolveToolDisplay(current.name, targetState, current.id, current.params), } set({ toolCallsById: updatedMap }) } for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] as any + const b = context.contentBlocks[i] if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) { if ( isRejectedState(b.toolCall?.state) || @@ -386,7 +402,11 @@ export const sseHandlers: Record = { } } updateStreamingMessage(set, context) - } catch {} + } catch (error) { + logger.warn('Failed to process tool_error SSE event', { + error: error instanceof Error ? error.message : String(error), + }) + } }, tool_generating: (data, context, get, set) => { const { toolCallId, toolName } = data @@ -410,11 +430,11 @@ export const sseHandlers: Record = { } }, tool_call: (data, context, get, set) => { - const toolData = data?.data ?? 
{} - const id: string | undefined = toolData.id || data?.toolCallId - const name: string | undefined = toolData.name || data?.toolName + const toolData = asRecord(data?.data) + const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId + const name: string | undefined = (toolData.name as string | undefined) || data?.toolName if (!id) return - const args = toolData.arguments + const args = toolData.arguments as Record | undefined const isPartial = toolData.partial === true const { toolCallsById } = get() diff --git a/apps/sim/lib/copilot/client-sse/subagent-handlers.ts b/apps/sim/lib/copilot/client-sse/subagent-handlers.ts index fa2fc2e1c..e68a552b6 100644 --- a/apps/sim/lib/copilot/client-sse/subagent-handlers.ts +++ b/apps/sim/lib/copilot/client-sse/subagent-handlers.ts @@ -1,19 +1,25 @@ import { createLogger } from '@sim/logger' import { + asRecord, normalizeSseEvent, shouldSkipToolCallEvent, shouldSkipToolResultEvent, } from '@/lib/copilot/orchestrator/sse-utils' +import type { SSEEvent } from '@/lib/copilot/orchestrator/types' import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry' import { resolveToolDisplay } from '@/lib/copilot/store-utils' import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types' -import type { StreamingContext } from './types' +import type { ClientStreamingContext } from './types' import { sseHandlers, type SSEHandler, updateStreamingMessage } from './handlers' const logger = createLogger('CopilotClientSubagentHandlers') +type StoreSet = ( + partial: Partial | ((state: CopilotStore) => Partial) +) => void + export function appendSubAgentContent( - context: StreamingContext, + context: ClientStreamingContext, parentToolCallId: string, text: string ) { @@ -38,9 +44,9 @@ export function appendSubAgentContent( } export function updateToolCallWithSubAgentData( - context: StreamingContext, + context: ClientStreamingContext, get: () => CopilotStore, - set: any, + set: StoreSet, parentToolCallId: string ) { const { toolCallsById } = get() @@ -76,7 +82,7 @@ export function updateToolCallWithSubAgentData( let foundInContentBlocks = false for (let i = 0; i < context.contentBlocks.length; i++) { - const b = context.contentBlocks[i] as any + const b = context.contentBlocks[i] if (b.type === 'tool_call' && b.toolCall?.id === parentToolCallId) { context.contentBlocks[i] = { ...b, toolCall: updatedToolCall } foundInContentBlocks = true @@ -89,8 +95,8 @@ export function updateToolCallWithSubAgentData( parentToolCallId, contentBlocksCount: context.contentBlocks.length, toolCallBlockIds: context.contentBlocks - .filter((b: any) => b.type === 'tool_call') - .map((b: any) => b.toolCall?.id), + .filter((b) => b.type === 'tool_call') + .map((b) => b.toolCall?.id), }) } @@ -104,27 +110,29 @@ export const subAgentSSEHandlers: Record = { content: (data, context, get, set) => { const parentToolCallId = context.subAgentParentToolCallId + const contentStr = typeof data.data === 'string' ? data.data : (data.content || '') logger.info('[SubAgent] content event', { parentToolCallId, - hasData: !!data.data, - dataPreview: typeof data.data === 'string' ? data.data.substring(0, 50) : null, + hasData: !!contentStr, + dataPreview: contentStr ? 
contentStr.substring(0, 50) : null, }) - if (!parentToolCallId || !data.data) { + if (!parentToolCallId || !contentStr) { logger.warn('[SubAgent] content missing parentToolCallId or data', { parentToolCallId, - hasData: !!data.data, + hasData: !!contentStr, }) return } - appendSubAgentContent(context, parentToolCallId, data.data) + appendSubAgentContent(context, parentToolCallId, contentStr) updateToolCallWithSubAgentData(context, get, set, parentToolCallId) }, reasoning: (data, context, get, set) => { const parentToolCallId = context.subAgentParentToolCallId - const phase = data?.phase || data?.data?.phase + const dataObj = asRecord(data?.data) + const phase = data?.phase || (dataObj.phase as string | undefined) if (!parentToolCallId) return if (phase === 'start' || phase === 'end') return @@ -145,17 +153,18 @@ export const subAgentSSEHandlers: Record = { const parentToolCallId = context.subAgentParentToolCallId if (!parentToolCallId) return - const toolData = data?.data ?? {} - const id: string | undefined = toolData.id || data?.toolCallId - const name: string | undefined = toolData.name || data?.toolName + const toolData = asRecord(data?.data) + const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId + const name: string | undefined = (toolData.name as string | undefined) || data?.toolName if (!id || !name) return const isPartial = toolData.partial === true - let args = toolData.arguments || toolData.input || data?.arguments || data?.input + let args: Record | undefined = + (toolData.arguments || toolData.input) as Record | undefined if (typeof args === 'string') { try { - args = JSON.parse(args) + args = JSON.parse(args) as Record } catch { logger.warn('[SubAgent] Failed to parse arguments string', { args }) } @@ -177,7 +186,9 @@ export const subAgentSSEHandlers: Record = { context.subAgentBlocks[parentToolCallId] = [] } - const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex((tc) => tc.id === id) + const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex( + (tc: CopilotToolCall) => tc.id === id + ) const subAgentToolCall: CopilotToolCall = { id, name, @@ -213,7 +224,8 @@ export const subAgentSSEHandlers: Record = { const parentToolCallId = context.subAgentParentToolCallId if (!parentToolCallId) return - const toolCallId: string | undefined = data?.toolCallId || data?.data?.id + const resultData = asRecord(data?.data) + const toolCallId: string | undefined = data?.toolCallId || (resultData.id as string | undefined) const success: boolean | undefined = data?.success !== false if (!toolCallId) return @@ -222,7 +234,7 @@ export const subAgentSSEHandlers: Record = { const targetState = success ? 
ClientToolCallState.success : ClientToolCallState.error
 const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
- (tc) => tc.id === toolCallId
+ (tc: CopilotToolCall) => tc.id === toolCallId
 )
 if (existingIndex >= 0) {
@@ -268,19 +280,20 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
 }
 export async function applySseEvent(
- data: any,
- context: StreamingContext,
+ rawData: SSEEvent,
+ context: ClientStreamingContext,
 get: () => CopilotStore,
 set: (next: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
): Promise<boolean> {
- const normalizedEvent = normalizeSseEvent(data)
+ const normalizedEvent = normalizeSseEvent(rawData)
 if (shouldSkipToolCallEvent(normalizedEvent) || shouldSkipToolResultEvent(normalizedEvent)) {
 return true
 }
- data = normalizedEvent
+ const data = normalizedEvent
 if (data.type === 'subagent_start') {
- const toolCallId = data.data?.tool_call_id
+ const startData = asRecord(data.data)
+ const toolCallId = startData.tool_call_id as string | undefined
 if (toolCallId) {
 context.subAgentParentToolCallId = toolCallId
 const { toolCallsById } = get()
diff --git a/apps/sim/lib/copilot/client-sse/types.ts b/apps/sim/lib/copilot/client-sse/types.ts
index 82e5b99be..8a4616a84 100644
--- a/apps/sim/lib/copilot/client-sse/types.ts
+++ b/apps/sim/lib/copilot/client-sse/types.ts
@@ -1,12 +1,28 @@
-import type { CopilotToolCall } from '@/stores/panel/copilot/types'
+import type { ChatContext, CopilotToolCall, SubAgentContentBlock } from '@/stores/panel/copilot/types'
+
+/**
+ * A content block used in copilot messages and during streaming.
+ * Uses a literal type union for `type` to stay compatible with CopilotMessage.
+ */
+export type ContentBlockType = 'text' | 'thinking' | 'tool_call' | 'contexts'
+
+export interface ClientContentBlock {
+ type: ContentBlockType
+ content?: string
+ timestamp: number
+ toolCall?: CopilotToolCall | null
+ startTime?: number
+ duration?: number
+ contexts?: ChatContext[]
+}
 export interface StreamingContext {
 messageId: string
 accumulatedContent: string
- contentBlocks: any[]
- currentTextBlock: any | null
+ contentBlocks: ClientContentBlock[]
+ currentTextBlock: ClientContentBlock | null
 isInThinkingBlock: boolean
- currentThinkingBlock: any | null
+ currentThinkingBlock: ClientContentBlock | null
 isInDesignWorkflowBlock: boolean
 designWorkflowContent: string
 pendingContent: string
@@ -18,6 +34,8 @@ export interface StreamingContext {
 subAgentParentToolCallId?: string
 subAgentContent: Record<string, string>
 subAgentToolCalls: Record<string, CopilotToolCall[]>
- subAgentBlocks: Record<string, any[]>
+ subAgentBlocks: Record<string, SubAgentContentBlock[]>
 suppressStreamingUpdates?: boolean
 }
+
+export type ClientStreamingContext = StreamingContext
diff --git a/apps/sim/lib/copilot/constants.ts b/apps/sim/lib/copilot/constants.ts
index 21e29cdbc..35c1acd2c 100644
--- a/apps/sim/lib/copilot/constants.ts
+++ b/apps/sim/lib/copilot/constants.ts
@@ -9,3 +9,107 @@ export const SIM_AGENT_API_URL =
 rawAgentUrl.startsWith('http://') || rawAgentUrl.startsWith('https://')
 ? rawAgentUrl
 : SIM_AGENT_API_URL_DEFAULT
+
+// ---------------------------------------------------------------------------
+// Redis key prefixes
+// ---------------------------------------------------------------------------
+
+/** Redis key prefix for tool call confirmation payloads (polled by waitForToolDecision). */
+export const REDIS_TOOL_CALL_PREFIX = 'tool_call:'
+
+/** Redis key prefix for copilot SSE stream buffers. 
*/ +export const REDIS_COPILOT_STREAM_PREFIX = 'copilot_stream:' + +// --------------------------------------------------------------------------- +// Timeouts +// --------------------------------------------------------------------------- + +/** Default timeout for the copilot orchestration stream loop (5 min). */ +export const ORCHESTRATION_TIMEOUT_MS = 300_000 + +/** Timeout for the client-side streaming response handler (10 min). */ +export const STREAM_TIMEOUT_MS = 600_000 + +/** TTL for Redis tool call confirmation entries (24 h). */ +export const REDIS_TOOL_CALL_TTL_SECONDS = 86_400 + +// --------------------------------------------------------------------------- +// Tool decision polling +// --------------------------------------------------------------------------- + +/** Initial poll interval when waiting for a user tool decision. */ +export const TOOL_DECISION_INITIAL_POLL_MS = 100 + +/** Maximum poll interval when waiting for a user tool decision. */ +export const TOOL_DECISION_MAX_POLL_MS = 3_000 + +/** Backoff multiplier for the tool decision poll interval. */ +export const TOOL_DECISION_POLL_BACKOFF = 1.5 + +// --------------------------------------------------------------------------- +// Stream resume +// --------------------------------------------------------------------------- + +/** Maximum number of resume attempts before giving up. */ +export const MAX_RESUME_ATTEMPTS = 3 + +/** SessionStorage key for persisting active stream metadata across page reloads. */ +export const STREAM_STORAGE_KEY = 'copilot_active_stream' + +// --------------------------------------------------------------------------- +// Client-side streaming batching +// --------------------------------------------------------------------------- + +/** Delay (ms) before processing the next queued message after stream completion. */ +export const QUEUE_PROCESS_DELAY_MS = 100 + +/** Delay (ms) before invalidating subscription queries after stream completion. */ +export const SUBSCRIPTION_INVALIDATE_DELAY_MS = 1_000 + +// --------------------------------------------------------------------------- +// UI helpers +// --------------------------------------------------------------------------- + +/** Maximum character length for an optimistic chat title derived from a user message. */ +export const OPTIMISTIC_TITLE_MAX_LENGTH = 50 + +// --------------------------------------------------------------------------- +// Copilot API paths (client-side fetch targets) +// --------------------------------------------------------------------------- + +/** POST — send a chat message to the copilot. */ +export const COPILOT_CHAT_API_PATH = '/api/copilot/chat' + +/** GET — resume/replay a copilot SSE stream. */ +export const COPILOT_CHAT_STREAM_API_PATH = '/api/copilot/chat/stream' + +/** POST — persist chat messages / plan artifact / config. */ +export const COPILOT_UPDATE_MESSAGES_API_PATH = '/api/copilot/chat/update-messages' + +/** DELETE — delete a copilot chat. */ +export const COPILOT_DELETE_CHAT_API_PATH = '/api/copilot/chat/delete' + +/** POST — confirm or reject a tool call. */ +export const COPILOT_CONFIRM_API_PATH = '/api/copilot/confirm' + +/** POST — forward diff-accepted/rejected stats to the copilot backend. */ +export const COPILOT_STATS_API_PATH = '/api/copilot/stats' + +/** GET — load checkpoints for a chat. */ +export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints' + +/** POST — revert to a checkpoint. 
*/
+export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
+
+/** GET/POST/DELETE — manage auto-allowed tools. */
+export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
+
+/** GET — fetch user credentials for masking. */
+export const COPILOT_CREDENTIALS_API_PATH = '/api/copilot/credentials'
+
+// ---------------------------------------------------------------------------
+// Dedup limits
+// ---------------------------------------------------------------------------
+
+/** Maximum entries in the in-memory SSE tool-event dedup cache. */
+export const STREAM_BUFFER_MAX_DEDUP_ENTRIES = 1_000
diff --git a/apps/sim/lib/copilot/messages/checkpoints.ts b/apps/sim/lib/copilot/messages/checkpoints.ts
index 1a4847d6e..29eca04c3 100644
--- a/apps/sim/lib/copilot/messages/checkpoints.ts
+++ b/apps/sim/lib/copilot/messages/checkpoints.ts
@@ -1,4 +1,5 @@
 import { createLogger } from '@sim/logger'
+import { COPILOT_CHECKPOINTS_API_PATH } from '@/lib/copilot/constants'
 import { mergeSubblockState } from '@/stores/workflows/utils'
 import { useWorkflowStore } from '@/stores/workflows/workflow/store'
 import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -58,7 +59,7 @@ export async function saveMessageCheckpoint(
 set({ messageSnapshots: nextSnapshots })
 try {
- const response = await fetch('/api/copilot/checkpoints', {
+ const response = await fetch(COPILOT_CHECKPOINTS_API_PATH, {
 method: 'POST',
 headers: { 'Content-Type': 'application/json' },
 body: JSON.stringify({
@@ -117,7 +118,7 @@ export function buildToolCallsById(messages: CopilotMessage[]): Record = {}
 for (const msg of messages) {
 if (msg.contentBlocks) {
- for (const block of msg.contentBlocks as any[]) {
+ for (const block of msg.contentBlocks) {
 if (block?.type === 'tool_call' && block.toolCall?.id) {
 extractToolCallsRecursively(block.toolCall, toolCallsById)
 }
diff --git a/apps/sim/lib/copilot/messages/credential-masking.ts b/apps/sim/lib/copilot/messages/credential-masking.ts
index f0e64eef8..33f154949 100644
--- a/apps/sim/lib/copilot/messages/credential-masking.ts
+++ b/apps/sim/lib/copilot/messages/credential-masking.ts
@@ -1,27 +1,30 @@
-export function maskCredentialIdsInValue(value: any, credentialIds: Set<string>): any {
+export function maskCredentialIdsInValue<T>(value: T, credentialIds: Set<string>): T {
 if (!value || credentialIds.size === 0) return value
 if (typeof value === 'string') {
- let masked = value
+ let masked = value as string
 const sortedIds = Array.from(credentialIds).sort((a, b) => b.length - a.length)
 for (const id of sortedIds) {
 if (id && masked.includes(id)) {
 masked = masked.split(id).join('••••••••')
 }
 }
- return masked
+ return masked as unknown as T
 }
 if (Array.isArray(value)) {
- return value.map((item) => maskCredentialIdsInValue(item, credentialIds))
+ return value.map((item) => maskCredentialIdsInValue(item, credentialIds)) as T
 }
 if (typeof value === 'object') {
- const masked: any = {}
- for (const key of Object.keys(value)) {
- masked[key] = maskCredentialIdsInValue(value[key], credentialIds)
+ const masked: Record<string, unknown> = {}
+ for (const key of Object.keys(value as Record<string, unknown>)) {
+ masked[key] = maskCredentialIdsInValue(
+ (value as Record<string, unknown>)[key],
+ credentialIds
+ )
 }
- return masked
+ return masked as T
 }
 return value
diff --git a/apps/sim/lib/copilot/messages/index.ts b/apps/sim/lib/copilot/messages/index.ts
index 4525fcdd8..2525a0079 100644
--- a/apps/sim/lib/copilot/messages/index.ts
+++ b/apps/sim/lib/copilot/messages/index.ts
@@ -1,3 +1,4 @@
export * from './credential-masking' export * from './serialization' export * from './checkpoints' +export * from './persist' diff --git a/apps/sim/lib/copilot/messages/persist.ts b/apps/sim/lib/copilot/messages/persist.ts new file mode 100644 index 000000000..9ca3a24fe --- /dev/null +++ b/apps/sim/lib/copilot/messages/persist.ts @@ -0,0 +1,43 @@ +import { createLogger } from '@sim/logger' +import { COPILOT_UPDATE_MESSAGES_API_PATH } from '@/lib/copilot/constants' +import type { CopilotMessage } from '@/stores/panel/copilot/types' +import { serializeMessagesForDB } from './serialization' + +const logger = createLogger('CopilotMessagePersistence') + +export async function persistMessages(params: { + chatId: string + messages: CopilotMessage[] + sensitiveCredentialIds?: Set + planArtifact?: string | null + mode?: string + model?: string + conversationId?: string +}): Promise { + try { + const dbMessages = serializeMessagesForDB( + params.messages, + params.sensitiveCredentialIds ?? new Set() + ) + const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + chatId: params.chatId, + messages: dbMessages, + ...(params.planArtifact !== undefined ? { planArtifact: params.planArtifact } : {}), + ...(params.mode || params.model + ? { config: { mode: params.mode, model: params.model } } + : {}), + ...(params.conversationId ? { conversationId: params.conversationId } : {}), + }), + }) + return response.ok + } catch (error) { + logger.warn('Failed to persist messages', { + chatId: params.chatId, + error: error instanceof Error ? error.message : String(error), + }) + return false + } +} diff --git a/apps/sim/lib/copilot/messages/serialization.ts b/apps/sim/lib/copilot/messages/serialization.ts index e69bae218..bcc58e0cf 100644 --- a/apps/sim/lib/copilot/messages/serialization.ts +++ b/apps/sim/lib/copilot/messages/serialization.ts @@ -1,10 +1,10 @@ import { createLogger } from '@sim/logger' -import type { CopilotMessage } from '@/stores/panel/copilot/types' +import type { CopilotMessage, CopilotToolCall } from '@/stores/panel/copilot/types' import { maskCredentialIdsInValue } from './credential-masking' const logger = createLogger('CopilotMessageSerialization') -export function clearStreamingFlags(toolCall: any): void { +export function clearStreamingFlags(toolCall: CopilotToolCall): void { if (!toolCall) return toolCall.subAgentStreaming = false @@ -27,18 +27,18 @@ export function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessa try { for (const message of messages) { if (message.role === 'assistant') { - logger.info('[normalizeMessagesForUI] Loading assistant message', { + logger.debug('[normalizeMessagesForUI] Loading assistant message', { id: message.id, hasContent: !!message.content?.trim(), contentBlockCount: message.contentBlocks?.length || 0, - contentBlockTypes: (message.contentBlocks as any[])?.map((b) => b?.type) ?? [], + contentBlockTypes: message.contentBlocks?.map((b) => b?.type) ?? 
[], }) } } for (const message of messages) { if (message.contentBlocks) { - for (const block of message.contentBlocks as any[]) { + for (const block of message.contentBlocks) { if (block?.type === 'tool_call' && block.toolCall) { clearStreamingFlags(block.toolCall) } @@ -51,7 +51,10 @@ export function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessa } } return messages - } catch { + } catch (error) { + logger.warn('[normalizeMessagesForUI] Failed to normalize messages', { + error: error instanceof Error ? error.message : String(error), + }) return messages } } @@ -88,16 +91,16 @@ export function deepClone(obj: T): T { export function serializeMessagesForDB( messages: CopilotMessage[], credentialIds: Set -): any[] { +): CopilotMessage[] { const result = messages .map((msg) => { let timestamp: string = msg.timestamp if (typeof timestamp !== 'string') { - const ts = timestamp as any + const ts = timestamp as unknown timestamp = ts instanceof Date ? ts.toISOString() : new Date().toISOString() } - const serialized: any = { + const serialized: CopilotMessage = { id: msg.id, role: msg.role, content: msg.content || '', @@ -108,16 +111,16 @@ export function serializeMessagesForDB( serialized.contentBlocks = deepClone(msg.contentBlocks) } - if (Array.isArray((msg as any).toolCalls) && (msg as any).toolCalls.length > 0) { - serialized.toolCalls = deepClone((msg as any).toolCalls) + if (Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0) { + serialized.toolCalls = deepClone(msg.toolCalls) } if (Array.isArray(msg.fileAttachments) && msg.fileAttachments.length > 0) { serialized.fileAttachments = deepClone(msg.fileAttachments) } - if (Array.isArray((msg as any).contexts) && (msg as any).contexts.length > 0) { - serialized.contexts = deepClone((msg as any).contexts) + if (Array.isArray(msg.contexts) && msg.contexts.length > 0) { + serialized.contexts = deepClone(msg.contexts) } if (Array.isArray(msg.citations) && msg.citations.length > 0) { @@ -142,16 +145,16 @@ export function serializeMessagesForDB( for (const msg of messages) { if (msg.role === 'assistant') { - logger.info('[serializeMessagesForDB] Input assistant message', { + logger.debug('[serializeMessagesForDB] Input assistant message', { id: msg.id, hasContent: !!msg.content?.trim(), contentBlockCount: msg.contentBlocks?.length || 0, - contentBlockTypes: (msg.contentBlocks as any[])?.map((b) => b?.type) ?? [], + contentBlockTypes: msg.contentBlocks?.map((b) => b?.type) ?? 
[], }) } } - logger.info('[serializeMessagesForDB] Serialized messages', { + logger.debug('[serializeMessagesForDB] Serialized messages', { inputCount: messages.length, outputCount: result.length, sample: diff --git a/apps/sim/lib/copilot/orchestrator/persistence.ts b/apps/sim/lib/copilot/orchestrator/persistence.ts index f42d16e37..2743a51d4 100644 --- a/apps/sim/lib/copilot/orchestrator/persistence.ts +++ b/apps/sim/lib/copilot/orchestrator/persistence.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { REDIS_TOOL_CALL_PREFIX } from '@/lib/copilot/constants' import { getRedisClient } from '@/lib/core/config/redis' const logger = createLogger('CopilotOrchestratorPersistence') @@ -15,7 +16,7 @@ export async function getToolConfirmation(toolCallId: string): Promise<{ if (!redis) return null try { - const data = await redis.get(`tool_call:${toolCallId}`) + const data = await redis.get(`${REDIS_TOOL_CALL_PREFIX}${toolCallId}`) if (!data) return null return JSON.parse(data) as { status: string; message?: string; timestamp?: string } } catch (error) { diff --git a/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts b/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts index d885e9876..138b5516b 100644 --- a/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts +++ b/apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants' import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config' import { asRecord, @@ -21,15 +22,16 @@ const logger = createLogger('CopilotSseHandlers') // Normalization + dedupe helpers live in sse-utils to keep server/client in sync. -function inferToolSuccess(data: Record | undefined): { +function inferToolSuccess(data: Record | undefined): { success: boolean hasResultData: boolean hasError: boolean } { - const hasExplicitSuccess = data?.success !== undefined || data?.result?.success !== undefined - const explicitSuccess = data?.success ?? data?.result?.success + const resultObj = asRecord(data?.result) + const hasExplicitSuccess = data?.success !== undefined || resultObj.success !== undefined + const explicitSuccess = data?.success ?? resultObj.success const hasResultData = data?.result !== undefined || data?.data !== undefined - const hasError = !!data?.error || !!data?.result?.error + const hasError = !!data?.error || !!resultObj.error const success = hasExplicitSuccess ? 
!!explicitSuccess : hasResultData && !hasError return { success, hasResultData, hasError } } @@ -50,12 +52,12 @@ function addContentBlock(context: StreamingContext, block: Omit = { chat_id: (event, context) => { - context.chatId = asRecord(event.data).chatId + context.chatId = asRecord(event.data).chatId as string | undefined }, title_updated: () => {}, tool_result: (event, context) => { const data = getEventData(event) - const toolCallId = event.toolCallId || data?.id + const toolCallId = event.toolCallId || (data?.id as string | undefined) if (!toolCallId) return const current = context.toolCalls.get(toolCallId) if (!current) return @@ -71,23 +73,24 @@ export const sseHandlers: Record = { } } if (hasError) { - current.error = data?.error || data?.result?.error + const resultObj = asRecord(data?.result) + current.error = (data?.error || resultObj.error) as string | undefined } }, tool_error: (event, context) => { const data = getEventData(event) - const toolCallId = event.toolCallId || data?.id + const toolCallId = event.toolCallId || (data?.id as string | undefined) if (!toolCallId) return const current = context.toolCalls.get(toolCallId) if (!current) return current.status = 'error' - current.error = data?.error || 'Tool execution failed' + current.error = (data?.error as string | undefined) || 'Tool execution failed' current.endTime = Date.now() }, tool_generating: (event, context) => { const data = getEventData(event) - const toolCallId = event.toolCallId || data?.toolCallId || data?.id - const toolName = event.toolName || data?.toolName || data?.name + const toolCallId = event.toolCallId || (data?.toolCallId as string | undefined) || (data?.id as string | undefined) + const toolName = event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined) if (!toolCallId || !toolName) return if (!context.toolCalls.has(toolCallId)) { context.toolCalls.set(toolCallId, { @@ -99,12 +102,12 @@ export const sseHandlers: Record = { } }, tool_call: async (event, context, execContext, options) => { - const toolData = getEventData(event) || {} - const toolCallId = toolData.id || event.toolCallId - const toolName = toolData.name || event.toolName + const toolData = getEventData(event) || ({} as Record) + const toolCallId = (toolData.id as string | undefined) || event.toolCallId + const toolName = (toolData.name as string | undefined) || event.toolName if (!toolCallId || !toolName) return - const args = toolData.arguments || toolData.input || asRecord(event.data).input + const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record | undefined const isPartial = toolData.partial === true const existing = context.toolCalls.get(toolCallId) @@ -161,7 +164,7 @@ export const sseHandlers: Record = { const isInteractive = options.interactive === true if (isInterruptTool && isInteractive) { - const decision = await waitForToolDecision(toolCallId, options.timeout || 600000) + const decision = await waitForToolDecision(toolCallId, options.timeout || STREAM_TIMEOUT_MS, options.abortSignal) if (decision?.status === 'accepted' || decision?.status === 'success') { await executeToolAndReport(toolCallId, context, execContext, options) return @@ -221,7 +224,8 @@ export const sseHandlers: Record = { } }, reasoning: (event, context) => { - const phase = asRecord(event.data).phase || asRecord(asRecord(event.data).data).phase + const d = asRecord(event.data) + const phase = d.phase || asRecord(d.data).phase if (phase === 'start') { context.isInThinkingBlock = true 
context.currentThinkingBlock = { @@ -239,17 +243,16 @@ export const sseHandlers: Record = { context.currentThinkingBlock = null return } - const d = asRecord(event.data) - const chunk = typeof event.data === 'string' ? event.data : d.data || d.content + const chunk = (d.data || d.content || event.content) as string | undefined if (!chunk || !context.currentThinkingBlock) return context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}` }, content: (event, context) => { const d = asRecord(event.data) - const chunk = typeof event.data === 'string' ? event.data : d.content || d.data + const chunk = (d.content || d.data || event.content) as string | undefined if (!chunk) return context.accumulatedContent += chunk - addContentBlock(context, { type: 'text', content: chunk as string }) + addContentBlock(context, { type: 'text', content: chunk }) }, done: (event, context) => { const d = asRecord(event.data) @@ -266,7 +269,7 @@ export const sseHandlers: Record = { }, error: (event, context) => { const d = asRecord(event.data) - const message = d.message || d.error || (typeof event.data === 'string' ? event.data : null) + const message = (d.message || d.error || event.error) as string | undefined if (message) { context.errors.push(message) } @@ -278,7 +281,8 @@ export const subAgentHandlers: Record = { content: (event, context) => { const parentToolCallId = context.subAgentParentToolCallId if (!parentToolCallId || !event.data) return - const chunk = typeof event.data === 'string' ? event.data : asRecord(event.data).content || '' + const d = asRecord(event.data) + const chunk = (d.content || d.data || event.content) as string | undefined if (!chunk) return context.subAgentContent[parentToolCallId] = (context.subAgentContent[parentToolCallId] || '') + chunk @@ -287,12 +291,12 @@ export const subAgentHandlers: Record = { tool_call: async (event, context, execContext, options) => { const parentToolCallId = context.subAgentParentToolCallId if (!parentToolCallId) return - const toolData = getEventData(event) || {} - const toolCallId = toolData.id || event.toolCallId - const toolName = toolData.name || event.toolName + const toolData = getEventData(event) || ({} as Record) + const toolCallId = (toolData.id as string | undefined) || event.toolCallId + const toolName = (toolData.name as string | undefined) || event.toolName if (!toolCallId || !toolName) return const isPartial = toolData.partial === true - const args = toolData.arguments || toolData.input || asRecord(event.data).input + const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record | undefined const existing = context.toolCalls.get(toolCallId) // Ignore late/duplicate tool_call events once we already have a result. 
diff --git a/apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts b/apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts
index 99eb593e5..1c707c570 100644
--- a/apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts
+++ b/apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts
@@ -1,4 +1,9 @@
 import { createLogger } from '@sim/logger'
+import {
+ TOOL_DECISION_INITIAL_POLL_MS,
+ TOOL_DECISION_MAX_POLL_MS,
+ TOOL_DECISION_POLL_BACKOFF,
+} from '@/lib/copilot/constants'
 import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
 import {
@@ -103,15 +108,20 @@ export async function executeToolAndReport(
 export async function waitForToolDecision(
 toolCallId: string,
- timeoutMs: number
+ timeoutMs: number,
+ abortSignal?: AbortSignal
): Promise<{ status: string; message?: string } | null> {
 const start = Date.now()
+ let interval = TOOL_DECISION_INITIAL_POLL_MS
+ const maxInterval = TOOL_DECISION_MAX_POLL_MS
 while (Date.now() - start < timeoutMs) {
+ if (abortSignal?.aborted) return null
 const decision = await getToolConfirmation(toolCallId)
 if (decision?.status) {
 return decision
 }
- await new Promise((resolve) => setTimeout(resolve, 100))
+ await new Promise((resolve) => setTimeout(resolve, interval))
+ interval = Math.min(interval * TOOL_DECISION_POLL_BACKOFF, maxInterval)
 }
 return null
}
diff --git a/apps/sim/lib/copilot/orchestrator/sse-utils.ts b/apps/sim/lib/copilot/orchestrator/sse-utils.ts
index 26d5a94bd..92f337e2a 100644
--- a/apps/sim/lib/copilot/orchestrator/sse-utils.ts
+++ b/apps/sim/lib/copilot/orchestrator/sse-utils.ts
@@ -1,22 +1,29 @@
+import { STREAM_BUFFER_MAX_DEDUP_ENTRIES } from '@/lib/copilot/constants'
 import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
-type EventDataObject = Record<string, any> | undefined
+type EventDataObject = Record<string, unknown> | undefined
 /** Safely cast event.data to a record for property access. */
-export const asRecord = (data: unknown): Record<string, any> =>
- (data && typeof data === 'object' && !Array.isArray(data) ? data : {}) as Record<string, any>
-
-const DEFAULT_TOOL_EVENT_TTL_MS = 5 * 60 * 1000
+export const asRecord = (data: unknown): Record<string, unknown> =>
+ (data && typeof data === 'object' && !Array.isArray(data) ? data : {}) as Record<string, unknown>
 /**
- * In-memory tool event dedupe.
+ * In-memory tool event dedupe with bounded size.
 *
- * NOTE: These sets are process-local only. In a multi-instance setup (e.g., ECS),
- * each task maintains its own dedupe cache, so duplicates can still appear across tasks.
+ * NOTE: Process-local only. In a multi-instance setup (e.g., ECS),
+ * each task maintains its own dedupe cache.
 */
 const seenToolCalls = new Set<string>()
 const seenToolResults = new Set<string>()
+function addToSet(set: Set<string>, id: string): void {
+ if (set.size >= STREAM_BUFFER_MAX_DEDUP_ENTRIES) {
+ const first = set.values().next().value
+ if (first) set.delete(first)
+ }
+ set.add(id)
+}
+
 const parseEventData = (data: unknown): EventDataObject => {
 if (!data) return undefined
 if (typeof data !== 'string') {
@@ -51,7 +58,7 @@ export const getEventData = (event: SSEEvent): EventDataObject => {
 function getToolCallIdFromEvent(event: SSEEvent): string | undefined {
 const data = getEventData(event)
- return event.toolCallId || data?.id || data?.toolCallId
+ return event.toolCallId || (data?.id as string | undefined) || (data?.toolCallId as string | undefined)
}
/** Normalizes SSE events so tool metadata is available at the top level. 
*/ @@ -59,9 +66,9 @@ export function normalizeSseEvent(event: SSEEvent): SSEEvent { if (!event) return event const data = getEventData(event) if (!data) return event - const toolCallId = event.toolCallId || data.id || data.toolCallId - const toolName = event.toolName || data.name || data.toolName - const success = event.success ?? data.success + const toolCallId = event.toolCallId || (data.id as string | undefined) || (data.toolCallId as string | undefined) + const toolName = event.toolName || (data.name as string | undefined) || (data.toolName as string | undefined) + const success = event.success ?? (data.success as boolean | undefined) const result = event.result ?? data.result const normalizedData = typeof event.data === 'string' ? data : event.data return { @@ -74,25 +81,16 @@ export function normalizeSseEvent(event: SSEEvent): SSEEvent { } } -function markToolCallSeen(toolCallId: string, ttlMs: number = DEFAULT_TOOL_EVENT_TTL_MS): void { - seenToolCalls.add(toolCallId) - setTimeout(() => { - seenToolCalls.delete(toolCallId) - }, ttlMs) +function markToolCallSeen(toolCallId: string): void { + addToSet(seenToolCalls, toolCallId) } function wasToolCallSeen(toolCallId: string): boolean { return seenToolCalls.has(toolCallId) } -export function markToolResultSeen( - toolCallId: string, - ttlMs: number = DEFAULT_TOOL_EVENT_TTL_MS -): void { - seenToolResults.add(toolCallId) - setTimeout(() => { - seenToolResults.delete(toolCallId) - }, ttlMs) +export function markToolResultSeen(toolCallId: string): void { + addToSet(seenToolResults, toolCallId) } export function wasToolResultSeen(toolCallId: string): boolean { diff --git a/apps/sim/lib/copilot/orchestrator/stream-buffer.ts b/apps/sim/lib/copilot/orchestrator/stream-buffer.ts index abf70aa2c..bc0524c4a 100644 --- a/apps/sim/lib/copilot/orchestrator/stream-buffer.ts +++ b/apps/sim/lib/copilot/orchestrator/stream-buffer.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { REDIS_COPILOT_STREAM_PREFIX } from '@/lib/copilot/constants' import { env } from '@/lib/core/config/env' import { getRedisClient } from '@/lib/core/config/redis' @@ -59,7 +60,7 @@ return id ` function getStreamKeyPrefix(streamId: string) { - return `copilot_stream:${streamId}` + return `${REDIS_COPILOT_STREAM_PREFIX}${streamId}` } function getEventsKey(streamId: string) { @@ -86,11 +87,11 @@ export type StreamMeta = { export type StreamEventEntry = { eventId: number streamId: string - event: Record + event: Record } export type StreamEventWriter = { - write: (event: Record) => Promise + write: (event: Record) => Promise flush: () => Promise close: () => Promise } @@ -147,7 +148,7 @@ export async function getStreamMeta(streamId: string): Promise + event: Record ): Promise { const redis = getRedisClient() if (!redis) { @@ -225,7 +226,7 @@ export function createStreamEventWriter(streamId: string): StreamEventWriter { zaddArgs.push(entry.eventId, JSON.stringify(entry)) } const pipeline = redis.pipeline() - pipeline.zadd(key, ...(zaddArgs as any)) + pipeline.zadd(key, ...(zaddArgs as [number, string])) pipeline.expire(key, config.ttlSeconds) pipeline.expire(getSeqKey(streamId), config.ttlSeconds) pipeline.zremrangebyrank(key, 0, -config.eventLimit - 1) @@ -253,7 +254,7 @@ export function createStreamEventWriter(streamId: string): StreamEventWriter { } } - const write = async (event: Record) => { + const write = async (event: Record) => { if (closed) return { eventId: 0, streamId, event } if (nextEventId === 0 || nextEventId > maxReservedId) { await reserveIds(1) 
diff --git a/apps/sim/lib/copilot/orchestrator/stream-core.ts b/apps/sim/lib/copilot/orchestrator/stream-core.ts index 5f5af90b4..14357c204 100644 --- a/apps/sim/lib/copilot/orchestrator/stream-core.ts +++ b/apps/sim/lib/copilot/orchestrator/stream-core.ts @@ -1,4 +1,5 @@ import { createLogger } from '@sim/logger' +import { ORCHESTRATION_TIMEOUT_MS } from '@/lib/copilot/constants' import { handleSubagentRouting, sseHandlers, @@ -68,7 +69,7 @@ export async function runStreamLoop( execContext: ExecutionContext, options: StreamLoopOptions ): Promise { - const { timeout = 300000, abortSignal } = options + const { timeout = ORCHESTRATION_TIMEOUT_MS, abortSignal } = options const response = await fetch(fetchUrl, { ...fetchOptions, diff --git a/apps/sim/lib/copilot/orchestrator/subagent.ts b/apps/sim/lib/copilot/orchestrator/subagent.ts index 9788a686a..cccf7a70b 100644 --- a/apps/sim/lib/copilot/orchestrator/subagent.ts +++ b/apps/sim/lib/copilot/orchestrator/subagent.ts @@ -28,7 +28,7 @@ export interface SubagentOrchestratorResult { structuredResult?: { type?: string summary?: string - data?: any + data?: unknown success?: boolean } error?: string @@ -37,14 +37,15 @@ export interface SubagentOrchestratorResult { export async function orchestrateSubagentStream( agentId: string, - requestPayload: Record, + requestPayload: Record, options: SubagentOrchestratorOptions ): Promise { const { userId, workflowId, workspaceId } = options const execContext = await buildExecutionContext(userId, workflowId, workspaceId) + const msgId = requestPayload?.messageId const context = createStreamingContext({ - messageId: requestPayload?.messageId || crypto.randomUUID(), + messageId: typeof msgId === 'string' ? msgId : crypto.randomUUID(), }) let structuredResult: SubagentOrchestratorResult['structuredResult'] @@ -109,12 +110,12 @@ export async function orchestrateSubagentStream( function normalizeStructuredResult(data: unknown): SubagentOrchestratorResult['structuredResult'] { if (!data || typeof data !== 'object') return undefined - const d = data as Record + const d = data as Record return { - type: d.result_type || d.type, - summary: d.summary, + type: (d.result_type || d.type) as string | undefined, + summary: d.summary as string | undefined, data: d.data ?? 
d, - success: d.success, + success: d.success as boolean | undefined, } } diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts index 555552693..dc5d7a988 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/deployment-tools/manage.ts @@ -58,7 +58,17 @@ export async function executeCheckDeploymentStatus( hasPassword: Boolean(chatDeploy[0]?.password), } - const mcpDetails = { isDeployed: false, servers: [] as any[] } + const mcpDetails: { + isDeployed: boolean + servers: Array<{ + serverId: string + serverName: string + toolName: string + toolDescription: string | null + parameterSchema: unknown + toolId: string + }> + } = { isDeployed: false, servers: [] } if (workspaceId) { const servers = await db .select({ diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts index 2882a8bbf..dbd3a24a9 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/index.ts @@ -138,7 +138,7 @@ export async function executeToolServerSide( */ async function executeServerToolDirect( toolName: string, - params: Record, + params: Record, context: ExecutionContext ): Promise { try { @@ -180,8 +180,8 @@ export async function markToolComplete( toolCallId: string, toolName: string, status: number, - message?: any, - data?: any + message?: unknown, + data?: unknown ): Promise { try { const response = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, { diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts index f70444acd..8464e42ca 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/integration-tools.ts @@ -41,9 +41,9 @@ export async function executeIntegrationToolDirect( // Deep resolution walks nested objects to replace {{ENV_VAR}} references. // Safe because tool arguments originate from the LLM (not direct user input) // and env vars belong to the user themselves. 
- const executionParams: Record = resolveEnvVarReferences(toolArgs, decryptedEnvVars, { + const executionParams = resolveEnvVarReferences(toolArgs, decryptedEnvVars, { deep: true, - }) as Record + }) as Record if (toolConfig.oauth?.required && toolConfig.oauth.provider) { const provider = toolConfig.oauth.provider @@ -62,7 +62,7 @@ export async function executeIntegrationToolDirect( const acc = accounts[0] const requestId = generateRequestId() - const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id) + const { accessToken } = await refreshTokenIfNeeded(requestId, acc, acc.id) if (!accessToken) { return { diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts index 12158fc74..148928694 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/mutations.ts @@ -1,4 +1,5 @@ import crypto from 'crypto' +import { createLogger } from '@sim/logger' import { db } from '@sim/db' import { workflow, workflowFolder } from '@sim/db/schema' import { and, eq, isNull, max } from 'drizzle-orm' @@ -16,6 +17,8 @@ import type { VariableOperation, } from '../param-types' +const logger = createLogger('WorkflowMutations') + export async function executeCreateWorkflow( params: CreateWorkflowParams, context: ExecutionContext @@ -185,17 +188,27 @@ export async function executeSetGlobalWorkflowVariables( : [] const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId) - const currentVarsRecord = (workflowRecord.variables as Record) || {} - const byName: Record = {} - Object.values(currentVarsRecord).forEach((v: any) => { - if (v && typeof v === 'object' && v.id && v.name) byName[String(v.name)] = v + interface WorkflowVariable { + id: string + workflowId?: string + name: string + type: string + value?: unknown + } + const currentVarsRecord = (workflowRecord.variables as Record) || {} + const byName: Record = {} + Object.values(currentVarsRecord).forEach((v) => { + if (v && typeof v === 'object' && 'id' in v && 'name' in v) { + const variable = v as WorkflowVariable + byName[String(variable.name)] = variable + } }) for (const op of operations) { const key = String(op?.name || '') if (!key) continue const nextType = op?.type || byName[key]?.type || 'plain' - const coerceValue = (value: any, type: string) => { + const coerceValue = (value: unknown, type: string): unknown => { if (value === undefined) return value if (type === 'number') { const n = Number(value) @@ -213,7 +226,9 @@ export async function executeSetGlobalWorkflowVariables( if (type === 'array' && Array.isArray(parsed)) return parsed if (type === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed)) return parsed - } catch {} + } catch (error) { + logger.warn('Failed to parse JSON value for variable coercion', { error: error instanceof Error ? 
error.message : String(error) }) + } return value } return value @@ -254,7 +269,7 @@ export async function executeSetGlobalWorkflowVariables( } const nextVarsRecord = Object.fromEntries( - Object.values(byName).map((v: any) => [String(v.id), v]) + Object.values(byName).map((v) => [String(v.id), v]) ) await db diff --git a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts index 5bcca2e0d..dd4231b97 100644 --- a/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts +++ b/apps/sim/lib/copilot/orchestrator/tool-executor/workflow-tools/queries.ts @@ -13,6 +13,7 @@ import { getBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs' import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers' +import type { Loop, Parallel } from '@/stores/workflows/workflow/types' import { normalizeName } from '@/executor/constants' import { ensureWorkflowAccess, @@ -209,12 +210,15 @@ export async function executeGetWorkflowData( ) if (dataType === 'global_variables') { - const variablesRecord = (workflowRecord.variables as Record) || {} - const variables = Object.values(variablesRecord).map((v: any) => ({ - id: String(v?.id || ''), - name: String(v?.name || ''), - value: v?.value, - })) + const variablesRecord = (workflowRecord.variables as Record) || {} + const variables = Object.values(variablesRecord).map((v) => { + const variable = v as Record | null + return { + id: String(variable?.id || ''), + name: String(variable?.name || ''), + value: variable?.value, + } + }) return { success: true, output: { variables } } } @@ -232,13 +236,17 @@ export async function executeGetWorkflowData( .where(or(...conditions)) .orderBy(desc(customTools.createdAt)) - const customToolsData = toolsRows.map((tool) => ({ - id: String(tool.id || ''), - title: String(tool.title || ''), - functionName: String((tool.schema as any)?.function?.name || ''), - description: String((tool.schema as any)?.function?.description || ''), - parameters: (tool.schema as any)?.function?.parameters, - })) + const customToolsData = toolsRows.map((tool) => { + const schema = tool.schema as Record | null + const fn = (schema?.function ?? 
{}) as Record + return { + id: String(tool.id || ''), + title: String(tool.title || ''), + functionName: String(fn.name || ''), + description: String(fn.description || ''), + parameters: fn.parameters, + } + }) return { success: true, output: { customTools: customToolsData } } } @@ -377,10 +385,28 @@ export async function executeGetBlockUpstreamReferences( const loops = normalized.loops || {} const parallels = normalized.parallels || {} - const graphEdges = edges.map((edge: any) => ({ source: edge.source, target: edge.target })) + const graphEdges = edges.map((edge) => ({ source: edge.source, target: edge.target })) const variableOutputs = await getWorkflowVariablesForTool(workflowId) - const results: any[] = [] + interface AccessibleBlockEntry { + blockId: string + blockName: string + blockType: string + outputs: string[] + triggerMode?: boolean + accessContext?: 'inside' | 'outside' + } + + interface UpstreamReferenceResult { + blockId: string + blockName: string + blockType: string + accessibleBlocks: AccessibleBlockEntry[] + insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }> + variables: Array<{ id: string; name: string; type: string; tag: string }> + } + + const results: UpstreamReferenceResult[] = [] for (const blockId of params.blockIds) { const targetBlock = blocks[blockId] @@ -390,7 +416,7 @@ export async function executeGetBlockUpstreamReferences( const containingLoopIds = new Set() const containingParallelIds = new Set() - Object.values(loops as Record).forEach((loop) => { + Object.values(loops).forEach((loop) => { if (loop?.nodes?.includes(blockId)) { containingLoopIds.add(loop.id) const loopBlock = blocks[loop.id] @@ -404,7 +430,7 @@ export async function executeGetBlockUpstreamReferences( } }) - Object.values(parallels as Record).forEach((parallel) => { + Object.values(parallels).forEach((parallel) => { if (parallel?.nodes?.includes(blockId)) { containingParallelIds.add(parallel.id) const parallelBlock = blocks[parallel.id] @@ -422,9 +448,9 @@ export async function executeGetBlockUpstreamReferences( const accessibleIds = new Set(ancestorIds) accessibleIds.add(blockId) - const starterBlock = Object.values(blocks).find((b: any) => isInputDefinitionTrigger(b.type)) - if (starterBlock && ancestorIds.includes((starterBlock as any).id)) { - accessibleIds.add((starterBlock as any).id) + const starterBlock = Object.values(blocks).find((b) => isInputDefinitionTrigger(b.type)) + if (starterBlock && ancestorIds.includes(starterBlock.id)) { + accessibleIds.add(starterBlock.id) } containingLoopIds.forEach((loopId) => { @@ -437,7 +463,7 @@ export async function executeGetBlockUpstreamReferences( parallels[parallelId]?.nodes?.forEach((nodeId: string) => accessibleIds.add(nodeId)) }) - const accessibleBlocks: any[] = [] + const accessibleBlocks: AccessibleBlockEntry[] = [] for (const accessibleBlockId of accessibleIds) { const block = blocks[accessibleBlockId] @@ -462,14 +488,14 @@ export async function executeGetBlockUpstreamReferences( } const formattedOutputs = formatOutputsWithPrefix(outputPaths, blockName) - const entry: any = { + const entry: AccessibleBlockEntry = { blockId: accessibleBlockId, blockName, blockType: block.type, outputs: formattedOutputs, + ...(block.triggerMode ? { triggerMode: true } : {}), + ...(accessContext ? 
{ accessContext } : {}),
 }
- if (block.triggerMode) entry.triggerMode = true
- if (accessContext) entry.accessContext = accessContext
 accessibleBlocks.push(entry)
 }
@@ -499,10 +525,14 @@ async function getWorkflowVariablesForTool(
 .where(eq(workflow.id, workflowId))
 .limit(1)
- const variablesRecord = (workflowRecord?.variables as Record<string, any>) || {}
+ const variablesRecord = (workflowRecord?.variables as Record<string, unknown>) || {}
 return Object.values(variablesRecord)
- .filter((v: any) => v?.name && String(v.name).trim() !== '')
- .map((v: any) => ({
+ .filter((v): v is Record<string, unknown> => {
+ if (!v || typeof v !== 'object') return false
+ const variable = v as Record<string, unknown>
+ return !!variable.name && String(variable.name).trim() !== ''
+ })
+ .map((v) => ({
 id: String(v.id || ''),
 name: String(v.name || ''),
 type: String(v.type || 'plain'),
@@ -513,8 +543,8 @@ function getSubflowInsidePaths(
 blockType: 'loop' | 'parallel',
 blockId: string,
- loops: Record<string, any>,
- parallels: Record<string, any>
+ loops: Record<string, Loop>,
+ parallels: Record<string, Parallel>
 ): string[] {
 const paths = ['index']
 if (blockType === 'loop') {
diff --git a/apps/sim/lib/copilot/orchestrator/types.ts b/apps/sim/lib/copilot/orchestrator/types.ts
index dd321bab3..eebc806a7 100644
--- a/apps/sim/lib/copilot/orchestrator/types.ts
+++ b/apps/sim/lib/copilot/orchestrator/types.ts
@@ -19,12 +19,24 @@ export type SSEEventType =
 export interface SSEEvent {
 type: SSEEventType
- data?: unknown
+ data?: Record<string, unknown>
 subagent?: string
 toolCallId?: string
 toolName?: string
 success?: boolean
 result?: unknown
+ /** Set on chat_id events */
+ chatId?: string
+ /** Set on title_updated events */
+ title?: string
+ /** Set on error events */
+ error?: string
+ /** Set on content/reasoning events */
+ content?: string
+ /** Set on reasoning events */
+ phase?: string
+ /** Set on tool_result events */
+ failedDependency?: boolean
 }
 export type ToolCallStatus = 'pending' | 'executing' | 'success' | 'error' | 'skipped' | 'rejected'
diff --git a/apps/sim/lib/copilot/store-utils.ts b/apps/sim/lib/copilot/store-utils.ts
index 6c2cfcc4b..86d310503 100644
--- a/apps/sim/lib/copilot/store-utils.ts
+++ b/apps/sim/lib/copilot/store-utils.ts
@@ -1,3 +1,4 @@
+import { createLogger } from '@sim/logger'
 import { Loader2 } from 'lucide-react'
 import {
 ClientToolCallState,
@@ -6,6 +7,12 @@ import {
 } from '@/lib/copilot/tools/client/tool-display-registry'
 import type { CopilotStore } from '@/stores/panel/copilot/types'
+const logger = createLogger('CopilotStoreUtils')
+
+type StoreSet = (
+ partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
+) => void
+
 export function resolveToolDisplay(
 toolName: string | undefined,
 state: ClientToolCallState,
@@ -80,7 +87,7 @@ export function isTerminalState(state: string): boolean {
 }
 export function abortAllInProgressTools(
- set: any,
+ set: StoreSet,
 get: () => CopilotStore
) {
 try {
@@ -89,7 +96,7 @@ export function abortAllInProgressTools(
 const abortedIds = new Set<string>()
 let hasUpdates = false
 for (const [id, tc] of Object.entries(toolCallsById)) {
- const st = tc.state as any
+ const st = tc.state
 const isTerminal =
 st === ClientToolCallState.success ||
 st === ClientToolCallState.error ||
@@ -101,7 +108,7 @@ export function abortAllInProgressTools(
 ...tc,
 state: ClientToolCallState.aborted,
 subAgentStreaming: false,
- display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, (tc as any).params),
+ display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, tc.params),
 }
 hasUpdates = true
 } else if (tc.subAgentStreaming) { 
@@ -117,7 +124,7 @@ export function abortAllInProgressTools( set((s: CopilotStore) => { const msgs = [...s.messages] for (let mi = msgs.length - 1; mi >= 0; mi--) { - const m = msgs[mi] as any + const m = msgs[mi] if (m.role !== 'assistant' || !Array.isArray(m.contentBlocks)) continue let changed = false const blocks = m.contentBlocks.map((b: any) => { @@ -148,7 +155,33 @@ export function abortAllInProgressTools( return { messages: msgs } }) } - } catch {} + } catch (error) { + logger.warn('Failed to abort in-progress tools', { + error: error instanceof Error ? error.message : String(error), + }) + } +} + +export function cleanupActiveState( + set: (partial: Record) => void, + get: () => Record +): void { + abortAllInProgressTools( + set as unknown as StoreSet, + get as unknown as () => CopilotStore + ) + try { + const { useWorkflowDiffStore } = require('@/stores/workflow-diff/store') as { + useWorkflowDiffStore: { + getState: () => { clearDiff: (options?: { restoreBaseline?: boolean }) => void } + } + } + useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) + } catch (error) { + logger.warn('Failed to clear diff during cleanup', { + error: error instanceof Error ? error.message : String(error), + }) + } } export function stripTodoTags(text: string): string { diff --git a/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts b/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts index 7b945d6b0..6699496e7 100644 --- a/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts +++ b/apps/sim/lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts @@ -288,7 +288,9 @@ export const getBlocksMetadataServerTool: BaseServerTool< if (existsSync(docPath)) { metadata.yamlDocumentation = readFileSync(docPath, 'utf-8') } - } catch {} + } catch (error) { + logger.warn('Failed to read YAML documentation file', { error: error instanceof Error ? error.message : String(error) }) + } if (metadata) { result[blockId] = removeNullish(metadata) as CopilotBlockMetadata @@ -951,7 +953,10 @@ function resolveToolIdForOperation(blockConfig: BlockConfig, opId: string): stri const maybeToolId = toolSelector({ operation: opId }) if (typeof maybeToolId === 'string') return maybeToolId } - } catch {} + } catch (error) { + const toolLogger = createLogger('GetBlocksMetadataServerTool') + toolLogger.warn('Failed to resolve tool ID for operation', { error: error instanceof Error ? error.message : String(error) }) + } return undefined } diff --git a/apps/sim/lib/copilot/tools/server/user/get-credentials.ts b/apps/sim/lib/copilot/tools/server/user/get-credentials.ts index 5aafc2dcb..78911bd80 100644 --- a/apps/sim/lib/copilot/tools/server/user/get-credentials.ts +++ b/apps/sim/lib/copilot/tools/server/user/get-credentials.ts @@ -89,7 +89,9 @@ export const getCredentialsServerTool: BaseServerTool try { const decoded = jwtDecode<{ email?: string; name?: string }>(acc.idToken) displayName = decoded.email || decoded.name || '' - } catch {} + } catch (error) { + logger.warn('Failed to decode JWT id token', { error: error instanceof Error ? error.message : String(error) }) + } } if (!displayName && baseProvider === 'github') displayName = `${acc.accountId} (GitHub)` if (!displayName && userEmail) displayName = userEmail @@ -107,7 +109,9 @@ export const getCredentialsServerTool: BaseServerTool acc.id ) accessToken = refreshedToken || accessToken - } catch {} + } catch (error) { + logger.warn('Failed to refresh OAuth access token', { error: error instanceof Error ? 
error.message : String(error) }) + } connectedCredentials.push({ id: acc.id, name: displayName, diff --git a/apps/sim/lib/copilot/tools/server/workflow/get-workflow-console.ts b/apps/sim/lib/copilot/tools/server/workflow/get-workflow-console.ts index 601a17c0a..06cfb1c82 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/get-workflow-console.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/get-workflow-console.ts @@ -4,6 +4,8 @@ import { createLogger } from '@sim/logger' import { desc, eq } from 'drizzle-orm' import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool' +const logger = createLogger('GetWorkflowConsoleServerTool') + interface GetWorkflowConsoleArgs { workflowId: string limit?: number @@ -87,7 +89,9 @@ function normalizeErrorMessage(errorValue: unknown): string | undefined { if (typeof errorValue === 'object') { try { return JSON.stringify(errorValue) - } catch {} + } catch (error) { + logger.warn('Failed to stringify error value', { error: error instanceof Error ? error.message : String(error) }) + } } try { return String(errorValue) @@ -217,7 +221,6 @@ function deriveExecutionErrorSummary(params: { export const getWorkflowConsoleServerTool: BaseServerTool = { name: 'get_workflow_console', async execute(rawArgs: GetWorkflowConsoleArgs): Promise { - const logger = createLogger('GetWorkflowConsoleServerTool') const { workflowId, limit = 2, diff --git a/apps/sim/stores/panel/copilot/store.ts b/apps/sim/stores/panel/copilot/store.ts index 694123c5f..fda773320 100644 --- a/apps/sim/stores/panel/copilot/store.ts +++ b/apps/sim/stores/panel/copilot/store.ts @@ -16,19 +16,36 @@ import { stripContinueOptionFromBlocks, } from '@/lib/copilot/client-sse/content-blocks' import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers' -import type { StreamingContext } from '@/lib/copilot/client-sse/types' +import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types' +import { + COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, + COPILOT_CHAT_API_PATH, + COPILOT_CHAT_STREAM_API_PATH, + COPILOT_CHECKPOINTS_API_PATH, + COPILOT_CHECKPOINTS_REVERT_API_PATH, + COPILOT_CONFIRM_API_PATH, + COPILOT_CREDENTIALS_API_PATH, + COPILOT_DELETE_CHAT_API_PATH, + MAX_RESUME_ATTEMPTS, + OPTIMISTIC_TITLE_MAX_LENGTH, + QUEUE_PROCESS_DELAY_MS, + STREAM_STORAGE_KEY, + STREAM_TIMEOUT_MS, + SUBSCRIPTION_INVALIDATE_DELAY_MS, +} from '@/lib/copilot/constants' import { buildCheckpointWorkflowState, buildToolCallsById, normalizeMessagesForUI, + persistMessages, saveMessageCheckpoint, - serializeMessagesForDB, } from '@/lib/copilot/messages' import type { CopilotTransportMode } from '@/lib/copilot/models' import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser' import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry' import { abortAllInProgressTools, + cleanupActiveState, isRejectedState, isTerminalState, resolveToolDisplay, @@ -38,6 +55,7 @@ import { getQueryClient } from '@/app/_shell/providers/query-provider' import { subscriptionKeys } from '@/hooks/queries/subscription' import type { ChatContext, + CheckpointEntry, CopilotMessage, CopilotStore, CopilotStreamInfo, @@ -51,24 +69,25 @@ import type { WorkflowState } from '@/stores/workflows/workflow/types' const logger = createLogger('CopilotStore') -const STREAM_STORAGE_KEY = 'copilot_active_stream' - /** * Flag set on beforeunload to suppress continue option during page refresh/close. - * Aborts during unload should NOT show the continue button. 
+ * Initialized once when the store module loads. */ -let isPageUnloading = false +let _isPageUnloading = false if (typeof window !== 'undefined') { window.addEventListener('beforeunload', () => { - isPageUnloading = true + _isPageUnloading = true }) } +function isPageUnloading(): boolean { + return _isPageUnloading +} function readActiveStreamFromStorage(): CopilotStreamInfo | null { if (typeof window === 'undefined') return null try { const raw = window.sessionStorage.getItem(STREAM_STORAGE_KEY) - logger.info('[Copilot] Reading stream from storage', { + logger.debug('[Copilot] Reading stream from storage', { hasRaw: !!raw, rawPreview: raw ? raw.substring(0, 100) : null, }) @@ -85,8 +104,8 @@ function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void { if (typeof window === 'undefined') return try { if (!info) { - logger.info('[Copilot] Clearing stream from storage', { - isPageUnloading, + logger.debug('[Copilot] Clearing stream from storage', { + isPageUnloading: isPageUnloading(), stack: new Error().stack?.split('\n').slice(1, 4).join(' <- '), }) window.sessionStorage.removeItem(STREAM_STORAGE_KEY) @@ -95,7 +114,7 @@ function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void { const payload = JSON.stringify(info) window.sessionStorage.setItem(STREAM_STORAGE_KEY, payload) const verified = window.sessionStorage.getItem(STREAM_STORAGE_KEY) === payload - logger.info('[Copilot] Writing stream to storage', { + logger.debug('[Copilot] Writing stream to storage', { streamId: info.streamId, lastEventId: info.lastEventId, userMessageContent: info.userMessageContent?.slice(0, 30), @@ -120,23 +139,35 @@ function updateActiveStreamEventId( writeActiveStreamToStorage(next) } -// On module load, clear any lingering diff preview (fresh page refresh) -try { - const diffStore = useWorkflowDiffStore.getState() - if (diffStore?.hasActiveDiff) { - diffStore.clearDiff() +/** + * Clear any lingering diff preview from a previous session. + * Called lazily when the store is first activated (setWorkflowId). + */ +let _initialDiffCleared = false +function clearInitialDiffIfNeeded(): void { + if (_initialDiffCleared) return + _initialDiffCleared = true + try { + const diffStore = useWorkflowDiffStore.getState() + if (diffStore?.hasActiveDiff) { + diffStore.clearDiff() + } + } catch (error) { + logger.warn('[Copilot] Failed to clear initial diff state', { + error: error instanceof Error ? error.message : String(error), + }) } -} catch {} +} const TEXT_BLOCK_TYPE = 'text' const CONTINUE_OPTIONS_TAG = '{"1":"Continue"}' -function cloneContentBlocks(blocks: any[]): any[] { +function cloneContentBlocks(blocks: ClientContentBlock[]): ClientContentBlock[] { if (!Array.isArray(blocks)) return [] return blocks.map((block) => (block ? 
{ ...block } : block))
 }
-function extractTextFromBlocks(blocks: any[]): string {
+function extractTextFromBlocks(blocks: ClientContentBlock[]): string {
   if (!Array.isArray(blocks)) return ''
   return blocks
     .filter((block) => block?.type === TEXT_BLOCK_TYPE && typeof block.content === 'string')
@@ -144,7 +175,7 @@
     .map((block) => (block.content as string))
     .join('')
 }
-function appendTextToBlocks(blocks: any[], text: string): any[] {
+function appendTextToBlocks(blocks: ClientContentBlock[], text: string): ClientContentBlock[] {
   const nextBlocks = cloneContentBlocks(blocks)
   if (!text) return nextBlocks
   const lastIndex = nextBlocks.length - 1
@@ -158,14 +189,14 @@
   return nextBlocks
 }
-function findLastTextBlock(blocks: any[]): any | null {
+function findLastTextBlock(blocks: ClientContentBlock[]): ClientContentBlock | null {
   if (!Array.isArray(blocks) || blocks.length === 0) return null
   const lastBlock = blocks[blocks.length - 1]
   return lastBlock?.type === TEXT_BLOCK_TYPE ? lastBlock : null
 }
-function replaceTextBlocks(blocks: any[], text: string): any[] {
-  const next: any[] = []
+function replaceTextBlocks(blocks: ClientContentBlock[], text: string): ClientContentBlock[] {
+  const next: ClientContentBlock[] = []
   let inserted = false
   for (const block of blocks ?? []) {
     if (block?.type === TEXT_BLOCK_TYPE) {
@@ -183,7 +214,7 @@
   return next
 }
-function createStreamingContext(messageId: string): StreamingContext {
+function createClientStreamingContext(messageId: string): ClientStreamingContext {
   return {
     messageId,
     accumulatedContent: '',
@@ -201,6 +232,639 @@
   }
 }
+type CopilotSet = (
+  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
+) => void
+
+type CopilotGet = () => CopilotStore
+
+interface SendMessageOptionsInput {
+  stream?: boolean
+  fileAttachments?: MessageFileAttachment[]
+  contexts?: ChatContext[]
+  messageId?: string
+  queueIfBusy?: boolean
+}
+
+interface PreparedSendContext {
+  workflowId: string
+  currentChat: CopilotChat | null
+  mode: CopilotStore['mode']
+  message: string
+  stream: boolean
+  fileAttachments?: MessageFileAttachment[]
+  contexts?: ChatContext[]
+  userMessage: CopilotMessage
+  streamingMessage: CopilotMessage
+  nextAbortController: AbortController
+}
+
+type InitiateStreamResult =
+  | { kind: 'success'; result: Awaited<ReturnType<typeof sendStreamingMessage>> }
+  | { kind: 'error'; error: unknown }
+
+function prepareSendContext(
+  get: CopilotGet,
+  set: CopilotSet,
+  message: string,
+  options: SendMessageOptionsInput
+): PreparedSendContext | null {
+  const {
+    workflowId,
+    currentChat,
+    mode,
+    revertState,
+    isSendingMessage,
+    abortController: activeAbortController,
+  } = get()
+  const {
+    stream = true,
+    fileAttachments,
+    contexts,
+    messageId,
+    queueIfBusy = true,
+  } = options
+
+  if (!workflowId) return null
+
+  if (isSendingMessage && !activeAbortController) {
+    logger.warn('[Copilot] sendMessage: stale sending state detected, clearing', {
+      originalMessageId: messageId,
+    })
+    set({ isSendingMessage: false })
+  } else if (isSendingMessage && activeAbortController?.signal.aborted) {
+    logger.warn('[Copilot] sendMessage: aborted controller detected, clearing', {
+      originalMessageId: messageId,
+    })
+    set({ isSendingMessage: false, abortController: null })
+  } else if (isSendingMessage) {
+    if (queueIfBusy) {
+      get().addToQueue(message, { fileAttachments, contexts, messageId })
+      logger.info('[Copilot] Message queued (already sending)', {
+        queueLength: get().messageQueue.length + 1,
+        originalMessageId: messageId,
+      })
+      return null
+    }
+    get().abortMessage({ suppressContinueOption: true })
+  }
+
+  const nextAbortController = new AbortController()
+  set({ isSendingMessage: true, error: null, abortController: nextAbortController })
+
+  const userMessage = createUserMessage(message, fileAttachments, contexts, messageId)
+  const streamingMessage = createStreamingMessage()
+  const snapshot = workflowId ? buildCheckpointWorkflowState(workflowId) : null
+  if (snapshot) {
+    set((state) => ({
+      messageSnapshots: { ...state.messageSnapshots, [userMessage.id]: snapshot },
+    }))
+  }
+
+  get()
+    .loadSensitiveCredentialIds()
+    .catch((err) => {
+      logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
+    })
+  get()
+    .loadAutoAllowedTools()
+    .catch((err) => {
+      logger.warn('[Copilot] Failed to load auto-allowed tools', err)
+    })
+
+  let newMessages: CopilotMessage[]
+  if (revertState) {
+    const currentMessages = get().messages
+    newMessages = [...currentMessages, userMessage, streamingMessage]
+    set({ revertState: null, inputValue: '' })
+  } else {
+    const currentMessages = get().messages
+    const existingIndex = messageId ? currentMessages.findIndex((m) => m.id === messageId) : -1
+    if (existingIndex !== -1) {
+      newMessages = [...currentMessages.slice(0, existingIndex), userMessage, streamingMessage]
+    } else {
+      newMessages = [...currentMessages, userMessage, streamingMessage]
+    }
+  }
+
+  const isFirstMessage = get().messages.length === 0 && !currentChat?.title
+  set({
+    messages: newMessages,
+    currentUserMessageId: userMessage.id,
+  })
+
+  const activeStream: CopilotStreamInfo = {
+    streamId: userMessage.id,
+    workflowId,
+    chatId: currentChat?.id,
+    userMessageId: userMessage.id,
+    assistantMessageId: streamingMessage.id,
+    lastEventId: 0,
+    resumeAttempts: 0,
+    userMessageContent: message,
+    fileAttachments,
+    contexts,
+    startedAt: Date.now(),
+  }
+  logger.info('[Copilot] Creating new active stream', {
+    streamId: activeStream.streamId,
+    workflowId: activeStream.workflowId,
+    chatId: activeStream.chatId,
+    userMessageContent: message.slice(0, 50),
+  })
+  set({ activeStream })
+  writeActiveStreamToStorage(activeStream)
+
+  if (isFirstMessage) {
+    const optimisticTitle =
+      message.length > OPTIMISTIC_TITLE_MAX_LENGTH
+        ? `${message.substring(0, OPTIMISTIC_TITLE_MAX_LENGTH - 3)}...`
+        : message
+    set((state) => ({
+      currentChat: state.currentChat ? { ...state.currentChat, title: optimisticTitle } : state.currentChat,
+      chats: state.currentChat
+        ? state.chats.map((c) => (c.id === state.currentChat!.id ? { ...c, title: optimisticTitle } : c))
+        : state.chats,
+    }))
+  }
+
+  return {
+    workflowId,
+    currentChat,
+    mode,
+    message,
+    stream,
+    fileAttachments,
+    contexts,
+    userMessage,
+    streamingMessage,
+    nextAbortController,
+  }
+}
+
+async function initiateStream(
+  prepared: PreparedSendContext,
+  get: CopilotGet
+): Promise<InitiateStreamResult> {
+  try {
+    const { contexts, mode } = prepared
+    logger.debug('sendMessage: preparing request', {
+      hasContexts: Array.isArray(contexts),
+      contextsCount: Array.isArray(contexts) ? contexts.length : 0,
+      contextsPreview: Array.isArray(contexts)
+        ? contexts.map((c) => ({
+            kind: c?.kind,
+            chatId: c?.kind === 'past_chat' ? c.chatId : undefined,
+            workflowId:
+              c?.kind === 'workflow' || c?.kind === 'current_workflow' || c?.kind === 'workflow_block'
+                ? c.workflowId
+                : undefined,
+            label: c?.label,
+          }))
+        : undefined,
+    })
+
+    const { streamingPlanContent } = get()
+    let messageToSend = prepared.message
+    if (streamingPlanContent?.trim()) {
+      messageToSend = `Design Document:\n\n${streamingPlanContent}\n\n==============\n\nUser Query:\n\n${prepared.message}`
+      logger.debug('[DesignDocument] Prepending plan content to message', {
+        planLength: streamingPlanContent.length,
+        originalMessageLength: prepared.message.length,
+        finalMessageLength: messageToSend.length,
+      })
+    }
+
+    const apiMode: CopilotTransportMode = mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
+    const uiToApiCommandMap: Record<string, string> = { actions: 'superagent' }
+    const commands = contexts
+      ?.filter((c) => c.kind === 'slash_command' && 'command' in c)
+      .map((c) => {
+        const uiCommand = c.command.toLowerCase()
+        return uiToApiCommandMap[uiCommand] || uiCommand
+      }) as string[] | undefined
+    const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command')
+
+    const result = await sendStreamingMessage({
+      message: messageToSend,
+      userMessageId: prepared.userMessage.id,
+      chatId: prepared.currentChat?.id,
+      workflowId: prepared.workflowId || undefined,
+      mode: apiMode,
+      model: get().selectedModel,
+      prefetch: get().agentPrefetch,
+      createNewChat: !prepared.currentChat,
+      stream: prepared.stream,
+      fileAttachments: prepared.fileAttachments,
+      contexts: filteredContexts,
+      commands: commands?.length ? commands : undefined,
+      abortSignal: prepared.nextAbortController.signal,
+    })
+
+    return { kind: 'success', result }
+  } catch (error) {
+    return { kind: 'error', error }
+  }
+}
+
+async function processStreamEvents(
+  initiated: InitiateStreamResult,
+  prepared: PreparedSendContext,
+  get: CopilotGet
+): Promise<boolean> {
+  if (initiated.kind !== 'success') return false
+  if (!initiated.result.success || !initiated.result.stream) return false
+  await get().handleStreamingResponse(
+    initiated.result.stream,
+    prepared.streamingMessage.id,
+    false,
+    prepared.userMessage.id,
+    prepared.nextAbortController.signal
+  )
+  return true
+}
+
+async function finalizeStream(
+  initiated: InitiateStreamResult,
+  processed: boolean,
+  prepared: PreparedSendContext,
+  set: CopilotSet
+): Promise<void> {
+  if (processed) {
+    set({ chatsLastLoadedAt: null, chatsLoadedForWorkflow: null })
+    return
+  }
+
+  if (initiated.kind === 'success') {
+    const { result } = initiated
+    if (result.error === 'Request was aborted') {
+      return
+    }
+
+    let errorContent = result.error || 'Failed to send message'
+    let errorType:
+      | 'usage_limit'
+      | 'unauthorized'
+      | 'forbidden'
+      | 'rate_limit'
+      | 'upgrade_required'
+      | undefined
+    if (result.status === 401) {
+      errorContent =
+        '_Unauthorized request. You need a valid API key to use the copilot. You can get one by going to [sim.ai](https://sim.ai) settings and generating one there._'
+      errorType = 'unauthorized'
+    } else if (result.status === 402) {
+      errorContent =
+        '_Usage limit exceeded. To continue using this service, upgrade your plan or increase your usage limit to:_'
+      errorType = 'usage_limit'
+    } else if (result.status === 403) {
+      errorContent =
+        '_Provider config not allowed for non-enterprise users. 
Please remove the provider config and try again_' + errorType = 'forbidden' + } else if (result.status === 426) { + errorContent = + '_Please upgrade to the latest version of the Sim platform to continue using the copilot._' + errorType = 'upgrade_required' + } else if (result.status === 429) { + errorContent = '_Provider rate limit exceeded. Please try again later._' + errorType = 'rate_limit' + } + + const errorMessage = createErrorMessage(prepared.streamingMessage.id, errorContent, errorType) + set((state) => ({ + messages: state.messages.map((m) => (m.id === prepared.streamingMessage.id ? errorMessage : m)), + error: errorContent, + isSendingMessage: false, + abortController: null, + })) + set({ activeStream: null }) + writeActiveStreamToStorage(null) + return + } + + const error = initiated.error + if (error instanceof Error && error.name === 'AbortError') return + const errorMessage = createErrorMessage( + prepared.streamingMessage.id, + 'Sorry, I encountered an error while processing your message. Please try again.' + ) + set((state) => ({ + messages: state.messages.map((m) => (m.id === prepared.streamingMessage.id ? errorMessage : m)), + error: error instanceof Error ? error.message : 'Failed to send message', + isSendingMessage: false, + abortController: null, + })) + set({ activeStream: null }) + writeActiveStreamToStorage(null) +} + +interface ResumeValidationResult { + nextStream: CopilotStreamInfo + messages: CopilotMessage[] + isFreshResume: boolean +} + +async function validateResumeState( + get: CopilotGet, + set: CopilotSet +): Promise { + const inMemoryStream = get().activeStream + const storedStream = readActiveStreamFromStorage() + const stored = inMemoryStream || storedStream + logger.debug('[Copilot] Resume check', { + hasInMemory: !!inMemoryStream, + hasStored: !!storedStream, + usingStream: inMemoryStream ? 'memory' : storedStream ? 'storage' : 'none', + streamId: stored?.streamId, + lastEventId: stored?.lastEventId, + storedWorkflowId: stored?.workflowId, + storedChatId: stored?.chatId, + userMessageContent: stored?.userMessageContent?.slice(0, 50), + currentWorkflowId: get().workflowId, + isSendingMessage: get().isSendingMessage, + resumeAttempts: stored?.resumeAttempts, + }) + + if (!stored || !stored.streamId) return null + if (get().isSendingMessage) return null + if (get().workflowId && stored.workflowId !== get().workflowId) return null + + if (stored.resumeAttempts >= MAX_RESUME_ATTEMPTS) { + logger.warn('[Copilot] Too many resume attempts, giving up') + return null + } + + const nextStream: CopilotStreamInfo = { + ...stored, + resumeAttempts: (stored.resumeAttempts || 0) + 1, + } + set({ activeStream: nextStream }) + writeActiveStreamToStorage(nextStream) + + let messages = get().messages + const isFreshResume = messages.length === 0 + if (isFreshResume && nextStream.chatId) { + try { + logger.debug('[Copilot] Loading chat for resume', { chatId: nextStream.chatId }) + const response = await fetch(`${COPILOT_CHAT_API_PATH}?chatId=${nextStream.chatId}`) + if (response.ok) { + const data = await response.json() + if (data.success && data.chat) { + const normalizedMessages = normalizeMessagesForUI(data.chat.messages ?? 
[]) + const toolCallsById = buildToolCallsById(normalizedMessages) + set({ + currentChat: data.chat, + messages: normalizedMessages, + toolCallsById, + streamingPlanContent: data.chat.planArtifact || '', + }) + messages = normalizedMessages + logger.debug('[Copilot] Loaded chat for resume', { + chatId: nextStream.chatId, + messageCount: normalizedMessages.length, + }) + } + } + } catch (e) { + logger.warn('[Copilot] Failed to load chat for resume', { error: String(e) }) + } + } + + return { nextStream, messages, isFreshResume } +} + +interface ReplayBufferedEventsResult { + nextStream: CopilotStreamInfo + bufferedContent: string + replayBlocks: ClientContentBlock[] | null + resumeFromEventId: number +} + +async function replayBufferedEvents( + stream: CopilotStreamInfo, + get: CopilotGet, + set: CopilotSet +): Promise { + let nextStream = stream + let bufferedContent = '' + let replayBlocks: ClientContentBlock[] | null = null + let resumeFromEventId = nextStream.lastEventId + + if (nextStream.lastEventId > 0) { + try { + logger.debug('[Copilot] Fetching all buffered events', { + streamId: nextStream.streamId, + savedLastEventId: nextStream.lastEventId, + }) + const batchUrl = `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent( + nextStream.streamId + )}&from=0&to=${encodeURIComponent(String(nextStream.lastEventId))}&batch=true` + const batchResponse = await fetch(batchUrl, { credentials: 'include' }) + if (batchResponse.ok) { + const batchData = await batchResponse.json() + if (batchData.success && Array.isArray(batchData.events)) { + const replayContext = createClientStreamingContext(nextStream.assistantMessageId) + replayContext.suppressStreamingUpdates = true + for (const entry of batchData.events) { + const event = entry.event + if (event) { + await applySseEvent(event, replayContext, get, set) + } + if (typeof entry.eventId === 'number' && entry.eventId > resumeFromEventId) { + resumeFromEventId = entry.eventId + } + } + bufferedContent = replayContext.accumulatedContent + replayBlocks = replayContext.contentBlocks + logger.debug('[Copilot] Loaded buffered content instantly', { + eventCount: batchData.events.length, + contentLength: bufferedContent.length, + resumeFromEventId, + }) + } else { + logger.warn('[Copilot] Batch response missing events', { + success: batchData.success, + hasEvents: Array.isArray(batchData.events), + }) + } + } else { + logger.warn('[Copilot] Failed to fetch buffered events', { + status: batchResponse.status, + }) + } + } catch (e) { + logger.warn('[Copilot] Failed to fetch buffered events', { error: String(e) }) + } + } + + if (resumeFromEventId > nextStream.lastEventId) { + nextStream = { ...nextStream, lastEventId: resumeFromEventId } + set({ activeStream: nextStream }) + writeActiveStreamToStorage(nextStream) + } + + return { nextStream, bufferedContent, replayBlocks, resumeFromEventId } +} + +interface ResumeFinalizeResult { + nextStream: CopilotStreamInfo + bufferedContent: string + resumeFromEventId: number +} + +function finalizeResume( + messages: CopilotMessage[], + replay: ReplayBufferedEventsResult, + get: CopilotGet, + set: CopilotSet +): ResumeFinalizeResult { + let nextMessages = messages + let cleanedExisting = false + + nextMessages = nextMessages.map((m) => { + if (m.id !== replay.nextStream.assistantMessageId) return m + const hasContinueTag = + (typeof m.content === 'string' && m.content.includes(CONTINUE_OPTIONS_TAG)) || + (Array.isArray(m.contentBlocks) && + m.contentBlocks.some( + (b) => + b.type === 'text' && 
b.content?.includes(CONTINUE_OPTIONS_TAG) + )) + if (!hasContinueTag) return m + cleanedExisting = true + return { + ...m, + content: stripContinueOption(m.content || ''), + contentBlocks: stripContinueOptionFromBlocks(m.contentBlocks ?? []), + } + }) + + if (!messages.some((m) => m.id === replay.nextStream.userMessageId)) { + const userMessage = createUserMessage( + replay.nextStream.userMessageContent || '', + replay.nextStream.fileAttachments, + replay.nextStream.contexts, + replay.nextStream.userMessageId + ) + nextMessages = [...nextMessages, userMessage] + } + + if (!nextMessages.some((m) => m.id === replay.nextStream.assistantMessageId)) { + const assistantMessage: CopilotMessage = { + ...createStreamingMessage(), + id: replay.nextStream.assistantMessageId, + content: replay.bufferedContent, + contentBlocks: + replay.replayBlocks && replay.replayBlocks.length > 0 + ? replay.replayBlocks + : replay.bufferedContent + ? [{ type: TEXT_BLOCK_TYPE, content: replay.bufferedContent, timestamp: Date.now() }] + : [], + } + nextMessages = [...nextMessages, assistantMessage] + } else if (replay.bufferedContent || (replay.replayBlocks && replay.replayBlocks.length > 0)) { + nextMessages = nextMessages.map((m) => { + if (m.id !== replay.nextStream.assistantMessageId) return m + let nextBlocks = replay.replayBlocks && replay.replayBlocks.length > 0 ? replay.replayBlocks : null + if (!nextBlocks) { + const existingBlocks = Array.isArray(m.contentBlocks) ? m.contentBlocks : [] + const existingText = extractTextFromBlocks(existingBlocks) + if (existingText && replay.bufferedContent.startsWith(existingText)) { + const delta = replay.bufferedContent.slice(existingText.length) + nextBlocks = delta ? appendTextToBlocks(existingBlocks, delta) : cloneContentBlocks(existingBlocks) + } else if (!existingText && existingBlocks.length === 0) { + nextBlocks = replay.bufferedContent + ? [{ type: TEXT_BLOCK_TYPE, content: replay.bufferedContent, timestamp: Date.now() }] + : [] + } else { + nextBlocks = replaceTextBlocks(existingBlocks, replay.bufferedContent) + } + } + return { + ...m, + content: replay.bufferedContent, + contentBlocks: nextBlocks ?? [], + } + }) + } + + if (cleanedExisting || nextMessages !== messages || replay.bufferedContent) { + set({ messages: nextMessages, currentUserMessageId: replay.nextStream.userMessageId }) + } else { + set({ currentUserMessageId: replay.nextStream.userMessageId }) + } + + return { + nextStream: replay.nextStream, + bufferedContent: replay.bufferedContent, + resumeFromEventId: replay.resumeFromEventId, + } +} + +async function resumeFromLiveStream( + resume: ResumeFinalizeResult, + isFreshResume: boolean, + get: CopilotGet, + set: CopilotSet +): Promise { + const abortController = new AbortController() + set({ isSendingMessage: true, abortController }) + + try { + logger.debug('[Copilot] Attempting to resume stream', { + streamId: resume.nextStream.streamId, + savedLastEventId: resume.nextStream.lastEventId, + resumeFromEventId: resume.resumeFromEventId, + isFreshResume, + bufferedContentLength: resume.bufferedContent.length, + assistantMessageId: resume.nextStream.assistantMessageId, + chatId: resume.nextStream.chatId, + }) + const result = await sendStreamingMessage({ + message: resume.nextStream.userMessageContent || '', + userMessageId: resume.nextStream.userMessageId, + workflowId: resume.nextStream.workflowId, + chatId: resume.nextStream.chatId || get().currentChat?.id || undefined, + mode: get().mode === 'ask' ? 'ask' : get().mode === 'plan' ? 
'plan' : 'agent', + model: get().selectedModel, + prefetch: get().agentPrefetch, + stream: true, + resumeFromEventId: resume.resumeFromEventId, + abortSignal: abortController.signal, + }) + + logger.info('[Copilot] Resume stream result', { + success: result.success, + hasStream: !!result.stream, + error: result.error, + }) + + if (result.success && result.stream) { + await get().handleStreamingResponse( + result.stream, + resume.nextStream.assistantMessageId, + true, + resume.nextStream.userMessageId, + abortController.signal + ) + return true + } + + set({ isSendingMessage: false, abortController: null }) + } catch (error) { + if (error instanceof Error && (error.name === 'AbortError' || error.message.includes('aborted'))) { + logger.info('[Copilot] Resume stream aborted by user') + set({ isSendingMessage: false, abortController: null }) + return false + } + logger.error('[Copilot] Failed to resume stream', { + error: error instanceof Error ? error.message : String(error), + }) + set({ isSendingMessage: false, abortController: null }) + } + return false +} + // Initial state (subset required for UI/streaming) const initialState = { mode: 'build' as const, @@ -211,7 +875,7 @@ const initialState = { currentChat: null as CopilotChat | null, chats: [] as CopilotChat[], messages: [] as CopilotMessage[], - messageCheckpoints: {} as Record, + messageCheckpoints: {} as Record, messageSnapshots: {} as Record, isLoading: false, isLoadingChats: false, @@ -253,16 +917,17 @@ export const useCopilotStore = create()( // Workflow selection setWorkflowId: async (workflowId: string | null) => { + clearInitialDiffIfNeeded() const currentWorkflowId = get().workflowId if (currentWorkflowId === workflowId) return const { isSendingMessage } = get() if (isSendingMessage) get().abortMessage() // Abort all in-progress tools and clear any diff preview - abortAllInProgressTools(set, get) - try { - useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) - } catch {} + cleanupActiveState( + set as unknown as (partial: Record) => void, + get as unknown as () => Record + ) set({ ...initialState, @@ -293,10 +958,10 @@ export const useCopilotStore = create()( if (currentChat && currentChat.id !== chat.id && isSendingMessage) get().abortMessage() // Abort in-progress tools and clear diff when changing chats - abortAllInProgressTools(set, get) - try { - useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) - } catch {} + cleanupActiveState( + set as unknown as (partial: Record) => void, + get as unknown as () => Record + ) // Restore plan content and config (mode/model) from selected chat const planArtifact = chat.planArtifact || '' @@ -304,7 +969,7 @@ export const useCopilotStore = create()( const chatMode = chatConfig.mode || get().mode const chatModel = chatConfig.model || get().selectedModel - logger.info('[Chat] Restoring chat config', { + logger.debug('[Chat] Restoring chat config', { chatId: chat.id, mode: chatMode, model: chatModel, @@ -336,27 +1001,25 @@ export const useCopilotStore = create()( // Background-save the previous chat's latest messages, plan artifact, and config before switching (optimistic) try { if (previousChat && previousChat.id !== chat.id) { - const dbMessages = serializeMessagesForDB(previousMessages, get().sensitiveCredentialIds) const previousPlanArtifact = get().streamingPlanContent - fetch('/api/copilot/chat/update-messages', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - chatId: previousChat.id, - messages: 
dbMessages, - planArtifact: previousPlanArtifact || null, - config: { - mode: previousMode, - model: previousModel, - }, - }), - }).catch(() => {}) + void persistMessages({ + chatId: previousChat.id, + messages: previousMessages, + sensitiveCredentialIds: get().sensitiveCredentialIds, + planArtifact: previousPlanArtifact || null, + mode: previousMode, + model: previousModel, + }) } - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to schedule previous-chat background save', { + error: error instanceof Error ? error.message : String(error), + }) + } // Refresh selected chat from server to ensure we have latest messages/tool calls try { - const response = await fetch(`/api/copilot/chat?workflowId=${workflowId}`) + const response = await fetch(`${COPILOT_CHAT_API_PATH}?workflowId=${workflowId}`) if (!response.ok) throw new Error(`Failed to fetch latest chat data: ${response.status}`) const data = await response.json() if (data.success && Array.isArray(data.chats)) { @@ -375,10 +1038,19 @@ export const useCopilotStore = create()( }) try { await get().loadMessageCheckpoints(latestChat.id) - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed loading checkpoints for selected chat', { + chatId: latestChat.id, + error: error instanceof Error ? error.message : String(error), + }) + } } } - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to refresh selected chat from server', { + error: error instanceof Error ? error.message : String(error), + }) + } }, createNewChat: async () => { @@ -386,32 +1058,30 @@ export const useCopilotStore = create()( if (isSendingMessage) get().abortMessage() // Abort in-progress tools and clear diff on new chat - abortAllInProgressTools(set, get) - try { - useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false }) - } catch {} + cleanupActiveState( + set as unknown as (partial: Record) => void, + get as unknown as () => Record + ) // Background-save the current chat before clearing (optimistic) try { const { currentChat, streamingPlanContent, mode, selectedModel } = get() if (currentChat) { const currentMessages = get().messages - const dbMessages = serializeMessagesForDB(currentMessages, get().sensitiveCredentialIds) - fetch('/api/copilot/chat/update-messages', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - chatId: currentChat.id, - messages: dbMessages, - planArtifact: streamingPlanContent || null, - config: { - mode, - model: selectedModel, - }, - }), - }).catch(() => {}) + void persistMessages({ + chatId: currentChat.id, + messages: currentMessages, + sensitiveCredentialIds: get().sensitiveCredentialIds, + planArtifact: streamingPlanContent || null, + mode, + model: selectedModel, + }) } - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to schedule current-chat background save', { + error: error instanceof Error ? 
error.message : String(error), + }) + } set({ currentChat: null, @@ -427,7 +1097,7 @@ export const useCopilotStore = create()( deleteChat: async (chatId: string) => { try { // Call delete API - const response = await fetch('/api/copilot/chat/delete', { + const response = await fetch(COPILOT_DELETE_CHAT_API_PATH, { method: 'DELETE', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ chatId }), @@ -463,7 +1133,7 @@ export const useCopilotStore = create()( // For now always fetch fresh set({ isLoadingChats: true }) try { - const url = `/api/copilot/chat?workflowId=${workflowId}` + const url = `${COPILOT_CHAT_API_PATH}?workflowId=${workflowId}` const response = await fetch(url) if (!response.ok) { throw new Error(`Failed to fetch chats: ${response.status}`) @@ -510,7 +1180,12 @@ export const useCopilotStore = create()( } try { await get().loadMessageCheckpoints(updatedCurrentChat.id) - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed loading checkpoints for current chat', { + chatId: updatedCurrentChat.id, + error: error instanceof Error ? error.message : String(error), + }) + } } else if (!isSendingMessage && !suppressAutoSelect) { const mostRecentChat: CopilotChat = data.chats[0] const normalizedMessages = normalizeMessagesForUI(mostRecentChat.messages ?? []) @@ -540,7 +1215,12 @@ export const useCopilotStore = create()( }) try { await get().loadMessageCheckpoints(mostRecentChat.id) - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed loading checkpoints for most recent chat', { + chatId: mostRecentChat.id, + error: error instanceof Error ? error.message : String(error), + }) + } } } else { set({ currentChat: null, messages: [] }) @@ -560,526 +1240,32 @@ export const useCopilotStore = create()( // Send a message (streaming only) sendMessage: async (message: string, options = {}) => { - const { - workflowId, - currentChat, - mode, - revertState, - isSendingMessage, - abortController: activeAbortController, - } = get() - const { - stream = true, - fileAttachments, - contexts, - messageId, - queueIfBusy = true, - } = options as { - stream?: boolean - fileAttachments?: MessageFileAttachment[] - contexts?: ChatContext[] - messageId?: string - queueIfBusy?: boolean - } + const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput) + if (!prepared) return - if (!workflowId) return + const initiated = await initiateStream(prepared, get) + let finalizedInitiated = initiated + let processed = false - // If already sending a message, queue this one instead unless bypassing queue - if (isSendingMessage && !activeAbortController) { - logger.warn('[Copilot] sendMessage: stale sending state detected, clearing', { - originalMessageId: messageId, - }) - set({ isSendingMessage: false }) - } else if (isSendingMessage && activeAbortController?.signal.aborted) { - logger.warn('[Copilot] sendMessage: aborted controller detected, clearing', { - originalMessageId: messageId, - }) - set({ isSendingMessage: false, abortController: null }) - } else if (isSendingMessage) { - if (queueIfBusy) { - get().addToQueue(message, { fileAttachments, contexts, messageId }) - logger.info('[Copilot] Message queued (already sending)', { - queueLength: get().messageQueue.length + 1, - originalMessageId: messageId, - }) - return - } - get().abortMessage({ suppressContinueOption: true }) - } - - const nextAbortController = new AbortController() - set({ isSendingMessage: true, error: null, abortController: nextAbortController }) - - const userMessage = 
createUserMessage(message, fileAttachments, contexts, messageId) - const streamingMessage = createStreamingMessage() - const snapshot = workflowId ? buildCheckpointWorkflowState(workflowId) : null - if (snapshot) { - set((state) => ({ - messageSnapshots: { ...state.messageSnapshots, [userMessage.id]: snapshot }, - })) - } - - get() - .loadSensitiveCredentialIds() - .catch((err) => { - logger.warn('[Copilot] Failed to load sensitive credential IDs', err) - }) - get() - .loadAutoAllowedTools() - .catch((err) => { - logger.warn('[Copilot] Failed to load auto-allowed tools', err) - }) - - let newMessages: CopilotMessage[] - if (revertState) { - const currentMessages = get().messages - newMessages = [...currentMessages, userMessage, streamingMessage] - set({ revertState: null, inputValue: '' }) - } else { - const currentMessages = get().messages - // If messageId is provided, check if it already exists (e.g., from edit flow) - const existingIndex = messageId ? currentMessages.findIndex((m) => m.id === messageId) : -1 - if (existingIndex !== -1) { - // Replace existing message instead of adding new one - newMessages = [...currentMessages.slice(0, existingIndex), userMessage, streamingMessage] - } else { - // Add new messages normally - newMessages = [...currentMessages, userMessage, streamingMessage] - } - } - - const isFirstMessage = get().messages.length === 0 && !currentChat?.title - set((state) => ({ - messages: newMessages, - currentUserMessageId: userMessage.id, - })) - - // Create new stream info and write to storage BEFORE starting the stream - // This ensures that if the user refreshes, they get the correct stream - const activeStream: CopilotStreamInfo = { - streamId: userMessage.id, - workflowId, - chatId: currentChat?.id, - userMessageId: userMessage.id, - assistantMessageId: streamingMessage.id, - lastEventId: 0, - resumeAttempts: 0, - userMessageContent: message, - fileAttachments, - contexts, - startedAt: Date.now(), - } - logger.info('[Copilot] Creating new active stream', { - streamId: activeStream.streamId, - workflowId: activeStream.workflowId, - chatId: activeStream.chatId, - userMessageContent: message.slice(0, 50), - }) - set({ activeStream }) - writeActiveStreamToStorage(activeStream) - - if (isFirstMessage) { - const optimisticTitle = message.length > 50 ? `${message.substring(0, 47)}...` : message - set((state) => ({ - currentChat: state.currentChat - ? { ...state.currentChat, title: optimisticTitle } - : state.currentChat, - chats: state.currentChat - ? state.chats.map((c) => - c.id === state.currentChat!.id ? { ...c, title: optimisticTitle } : c - ) - : state.chats, - })) - } - - try { - // Debug: log contexts presence before sending + if (initiated.kind === 'success') { try { - logger.info('sendMessage: preparing request', { - hasContexts: Array.isArray(contexts), - contextsCount: Array.isArray(contexts) ? contexts.length : 0, - contextsPreview: Array.isArray(contexts) - ? 
contexts.map((c: any) => ({ - kind: c?.kind, - chatId: (c as any)?.chatId, - workflowId: (c as any)?.workflowId, - label: (c as any)?.label, - })) - : undefined, - }) - } catch {} - - // Prepend design document to message if available - const { streamingPlanContent } = get() - let messageToSend = message - if (streamingPlanContent?.trim()) { - messageToSend = `Design Document:\n\n${streamingPlanContent}\n\n==============\n\nUser Query:\n\n${message}` - logger.info('[DesignDocument] Prepending plan content to message', { - planLength: streamingPlanContent.length, - originalMessageLength: message.length, - finalMessageLength: messageToSend.length, - }) + processed = await processStreamEvents(initiated, prepared, get) + } catch (error) { + finalizedInitiated = { kind: 'error', error } + processed = false } - - // Call copilot API - const apiMode: CopilotTransportMode = - mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent' - - // Extract slash commands from contexts (lowercase) and filter them out from contexts - // Map UI command IDs to API command IDs (e.g., "actions" -> "superagent") - const uiToApiCommandMap: Record = { actions: 'superagent' } - const commands = contexts - ?.filter((c) => c.kind === 'slash_command' && 'command' in c) - .map((c) => { - const uiCommand = (c as any).command.toLowerCase() - return uiToApiCommandMap[uiCommand] || uiCommand - }) as string[] | undefined - const filteredContexts = contexts?.filter((c) => c.kind !== 'slash_command') - - const result = await sendStreamingMessage({ - message: messageToSend, - userMessageId: userMessage.id, - chatId: currentChat?.id, - workflowId: workflowId || undefined, - mode: apiMode, - model: get().selectedModel, - prefetch: get().agentPrefetch, - createNewChat: !currentChat, - stream, - fileAttachments, - contexts: filteredContexts, - commands: commands?.length ? commands : undefined, - abortSignal: nextAbortController.signal, - }) - - if (result.success && result.stream) { - await get().handleStreamingResponse( - result.stream, - streamingMessage.id, - false, - userMessage.id, - nextAbortController.signal - ) - set({ chatsLastLoadedAt: null, chatsLoadedForWorkflow: null }) - } else { - if (result.error === 'Request was aborted') { - return - } - - // Check for specific status codes and provide custom messages - let errorContent = result.error || 'Failed to send message' - let errorType: - | 'usage_limit' - | 'unauthorized' - | 'forbidden' - | 'rate_limit' - | 'upgrade_required' - | undefined - if (result.status === 401) { - errorContent = - '_Unauthorized request. You need a valid API key to use the copilot. You can get one by going to [sim.ai](https://sim.ai) settings and generating one there._' - errorType = 'unauthorized' - } else if (result.status === 402) { - errorContent = - '_Usage limit exceeded. To continue using this service, upgrade your plan or increase your usage limit to:_' - errorType = 'usage_limit' - } else if (result.status === 403) { - errorContent = - '_Provider config not allowed for non-enterprise users. Please remove the provider config and try again_' - errorType = 'forbidden' - } else if (result.status === 426) { - errorContent = - '_Please upgrade to the latest version of the Sim platform to continue using the copilot._' - errorType = 'upgrade_required' - } else if (result.status === 429) { - errorContent = '_Provider rate limit exceeded. 
Please try again later._' - errorType = 'rate_limit' - } - - const errorMessage = createErrorMessage(streamingMessage.id, errorContent, errorType) - set((state) => ({ - messages: state.messages.map((m) => (m.id === streamingMessage.id ? errorMessage : m)), - error: errorContent, - isSendingMessage: false, - abortController: null, - })) - set({ activeStream: null }) - writeActiveStreamToStorage(null) - } - } catch (error) { - if (error instanceof Error && error.name === 'AbortError') return - const errorMessage = createErrorMessage( - streamingMessage.id, - 'Sorry, I encountered an error while processing your message. Please try again.' - ) - set((state) => ({ - messages: state.messages.map((m) => (m.id === streamingMessage.id ? errorMessage : m)), - error: error instanceof Error ? error.message : 'Failed to send message', - isSendingMessage: false, - abortController: null, - })) - set({ activeStream: null }) - writeActiveStreamToStorage(null) } + + await finalizeStream(finalizedInitiated, processed, prepared, set) }, resumeActiveStream: async () => { - const inMemoryStream = get().activeStream - const storedStream = readActiveStreamFromStorage() - const stored = inMemoryStream || storedStream - logger.info('[Copilot] Resume check', { - hasInMemory: !!inMemoryStream, - hasStored: !!storedStream, - usingStream: inMemoryStream ? 'memory' : storedStream ? 'storage' : 'none', - streamId: stored?.streamId, - lastEventId: stored?.lastEventId, - storedWorkflowId: stored?.workflowId, - storedChatId: stored?.chatId, - userMessageContent: stored?.userMessageContent?.slice(0, 50), - currentWorkflowId: get().workflowId, - isSendingMessage: get().isSendingMessage, - resumeAttempts: stored?.resumeAttempts, - }) - if (!stored || !stored.streamId) return false - if (get().isSendingMessage) return false - if (get().workflowId && stored.workflowId !== get().workflowId) return false + const validated = await validateResumeState(get, set) + if (!validated) return false - if (stored.resumeAttempts >= 3) { - logger.warn('[Copilot] Too many resume attempts, giving up') - return false - } - - let nextStream: CopilotStreamInfo = { - ...stored, - resumeAttempts: (stored.resumeAttempts || 0) + 1, - } - set({ activeStream: nextStream }) - writeActiveStreamToStorage(nextStream) - - // Load existing chat messages from database if we have a chatId but no messages - let messages = get().messages - // Track if this is a fresh page load (no messages in memory) - const isFreshResume = messages.length === 0 - if (isFreshResume && nextStream.chatId) { - try { - logger.info('[Copilot] Loading chat for resume', { chatId: nextStream.chatId }) - const response = await fetch(`/api/copilot/chat?chatId=${nextStream.chatId}`) - if (response.ok) { - const data = await response.json() - if (data.success && data.chat) { - const normalizedMessages = normalizeMessagesForUI(data.chat.messages ?? 
[]) - const toolCallsById = buildToolCallsById(normalizedMessages) - set({ - currentChat: data.chat, - messages: normalizedMessages, - toolCallsById, - streamingPlanContent: data.chat.planArtifact || '', - }) - messages = normalizedMessages - logger.info('[Copilot] Loaded chat for resume', { - chatId: nextStream.chatId, - messageCount: normalizedMessages.length, - }) - } - } - } catch (e) { - logger.warn('[Copilot] Failed to load chat for resume', { error: String(e) }) - } - } - - let bufferedContent = '' - let replayBlocks: any[] | null = null - let resumeFromEventId = nextStream.lastEventId - if (nextStream.lastEventId > 0) { - try { - logger.info('[Copilot] Fetching all buffered events', { - streamId: nextStream.streamId, - savedLastEventId: nextStream.lastEventId, - }) - const batchUrl = `/api/copilot/chat/stream?streamId=${encodeURIComponent( - nextStream.streamId - )}&from=0&to=${encodeURIComponent(String(nextStream.lastEventId))}&batch=true` - const batchResponse = await fetch(batchUrl, { credentials: 'include' }) - if (batchResponse.ok) { - const batchData = await batchResponse.json() - if (batchData.success && Array.isArray(batchData.events)) { - const replayContext = createStreamingContext(nextStream.assistantMessageId) - replayContext.suppressStreamingUpdates = true - for (const entry of batchData.events) { - const event = entry.event - if (event) { - await applySseEvent(event, replayContext, get, set) - } - if (typeof entry.eventId === 'number' && entry.eventId > resumeFromEventId) { - resumeFromEventId = entry.eventId - } - } - bufferedContent = replayContext.accumulatedContent - replayBlocks = replayContext.contentBlocks - logger.info('[Copilot] Loaded buffered content instantly', { - eventCount: batchData.events.length, - contentLength: bufferedContent.length, - resumeFromEventId, - }) - } else { - logger.warn('[Copilot] Batch response missing events', { - success: batchData.success, - hasEvents: Array.isArray(batchData.events), - }) - } - } else { - logger.warn('[Copilot] Failed to fetch buffered events', { - status: batchResponse.status, - }) - } - } catch (e) { - logger.warn('[Copilot] Failed to fetch buffered events', { error: String(e) }) - } - } - if (resumeFromEventId > nextStream.lastEventId) { - nextStream = { ...nextStream, lastEventId: resumeFromEventId } - set({ activeStream: nextStream }) - writeActiveStreamToStorage(nextStream) - } - - let nextMessages = messages - let cleanedExisting = false - nextMessages = nextMessages.map((m) => { - if (m.id !== nextStream.assistantMessageId) return m - const hasContinueTag = - (typeof m.content === 'string' && m.content.includes(CONTINUE_OPTIONS_TAG)) || - (Array.isArray(m.contentBlocks) && - m.contentBlocks.some( - (b: any) => - b?.type === TEXT_BLOCK_TYPE && - typeof b.content === 'string' && - b.content.includes(CONTINUE_OPTIONS_TAG) - )) - if (!hasContinueTag) return m - cleanedExisting = true - return { - ...m, - content: stripContinueOption(m.content || ''), - contentBlocks: stripContinueOptionFromBlocks(m.contentBlocks ?? 
[]), - } - }) - - if (!messages.some((m) => m.id === nextStream.userMessageId)) { - const userMessage = createUserMessage( - nextStream.userMessageContent || '', - nextStream.fileAttachments, - nextStream.contexts, - nextStream.userMessageId - ) - nextMessages = [...nextMessages, userMessage] - } - - if (!nextMessages.some((m) => m.id === nextStream.assistantMessageId)) { - const assistantMessage: CopilotMessage = { - ...createStreamingMessage(), - id: nextStream.assistantMessageId, - content: bufferedContent, - contentBlocks: - replayBlocks && replayBlocks.length > 0 - ? replayBlocks - : bufferedContent - ? [{ type: TEXT_BLOCK_TYPE, content: bufferedContent, timestamp: Date.now() }] - : [], - } - nextMessages = [...nextMessages, assistantMessage] - } else if (bufferedContent || (replayBlocks && replayBlocks.length > 0)) { - nextMessages = nextMessages.map((m) => { - if (m.id !== nextStream.assistantMessageId) return m - let nextBlocks = replayBlocks && replayBlocks.length > 0 ? replayBlocks : null - if (!nextBlocks) { - const existingBlocks = Array.isArray(m.contentBlocks) ? m.contentBlocks : [] - const existingText = extractTextFromBlocks(existingBlocks) - if (existingText && bufferedContent.startsWith(existingText)) { - const delta = bufferedContent.slice(existingText.length) - nextBlocks = delta - ? appendTextToBlocks(existingBlocks, delta) - : cloneContentBlocks(existingBlocks) - } else if (!existingText && existingBlocks.length === 0) { - nextBlocks = bufferedContent - ? [{ type: TEXT_BLOCK_TYPE, content: bufferedContent, timestamp: Date.now() }] - : [] - } else { - nextBlocks = replaceTextBlocks(existingBlocks, bufferedContent) - } - } - return { - ...m, - content: bufferedContent, - contentBlocks: nextBlocks ?? [], - } - }) - } - - if (cleanedExisting || nextMessages !== messages || bufferedContent) { - set({ messages: nextMessages, currentUserMessageId: nextStream.userMessageId }) - } else { - set({ currentUserMessageId: nextStream.userMessageId }) - } - - const abortController = new AbortController() - set({ isSendingMessage: true, abortController }) - - try { - logger.info('[Copilot] Attempting to resume stream', { - streamId: nextStream.streamId, - savedLastEventId: nextStream.lastEventId, - resumeFromEventId, - isFreshResume, - bufferedContentLength: bufferedContent.length, - assistantMessageId: nextStream.assistantMessageId, - chatId: nextStream.chatId, - }) - const result = await sendStreamingMessage({ - message: nextStream.userMessageContent || '', - userMessageId: nextStream.userMessageId, - workflowId: nextStream.workflowId, - chatId: nextStream.chatId || get().currentChat?.id || undefined, - mode: get().mode === 'ask' ? 'ask' : get().mode === 'plan' ? 
'plan' : 'agent', - model: get().selectedModel, - prefetch: get().agentPrefetch, - stream: true, - resumeFromEventId, - abortSignal: abortController.signal, - }) - - logger.info('[Copilot] Resume stream result', { - success: result.success, - hasStream: !!result.stream, - error: result.error, - }) - - if (result.success && result.stream) { - await get().handleStreamingResponse( - result.stream, - nextStream.assistantMessageId, - true, - nextStream.userMessageId, - abortController.signal - ) - return true - } - set({ isSendingMessage: false, abortController: null }) - } catch (error) { - // Handle AbortError gracefully - expected when user aborts - if ( - error instanceof Error && - (error.name === 'AbortError' || error.message.includes('aborted')) - ) { - logger.info('[Copilot] Resume stream aborted by user') - set({ isSendingMessage: false, abortController: null }) - return false - } - logger.error('[Copilot] Failed to resume stream', { - error: error instanceof Error ? error.message : String(error), - }) - set({ isSendingMessage: false, abortController: null }) - } - return false + const replayed = await replayBufferedEvents(validated.nextStream, get, set) + const finalized = finalizeResume(validated.messages, replayed, get, set) + return resumeFromLiveStream(finalized, validated.isFreshResume, get, set) }, // Abort streaming @@ -1087,7 +1273,7 @@ export const useCopilotStore = create()( const { abortController, isSendingMessage, messages } = get() if (!isSendingMessage || !abortController) return // Suppress continue option if explicitly requested OR if page is unloading (refresh/close) - const suppressContinueOption = options?.suppressContinueOption === true || isPageUnloading + const suppressContinueOption = options?.suppressContinueOption === true || isPageUnloading() set({ isAborting: true, suppressAbortContinueOption: suppressContinueOption }) try { abortController.abort() @@ -1098,7 +1284,7 @@ export const useCopilotStore = create()( const textContent = lastMessage.contentBlocks ?.filter((b) => b.type === 'text') - .map((b: any) => b.content) + .map((b) => b.content ?? '') .join('') || '' const nextContentBlocks = suppressContinueOption ? (lastMessage.contentBlocks ?? []) @@ -1132,7 +1318,7 @@ export const useCopilotStore = create()( // Only clear active stream for user-initiated aborts, NOT page unload // During page unload, keep the stream info so we can resume after refresh - if (!isPageUnloading) { + if (!isPageUnloading()) { set({ activeStream: null }) writeActiveStreamToStorage(null) } @@ -1145,26 +1331,27 @@ export const useCopilotStore = create()( if (currentChat) { try { const currentMessages = get().messages - const dbMessages = serializeMessagesForDB(currentMessages, get().sensitiveCredentialIds) - fetch('/api/copilot/chat/update-messages', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - chatId: currentChat.id, - messages: dbMessages, - planArtifact: streamingPlanContent || null, - config: { - mode, - model: selectedModel, - }, - }), - }).catch(() => {}) - } catch {} + void persistMessages({ + chatId: currentChat.id, + messages: currentMessages, + sensitiveCredentialIds: get().sensitiveCredentialIds, + planArtifact: streamingPlanContent || null, + mode, + model: selectedModel, + }) + } catch (error) { + logger.warn('[Copilot] Failed to queue abort snapshot persistence', { + error: error instanceof Error ? 
error.message : String(error), + }) + } } - } catch { + } catch (error) { + logger.warn('[Copilot] Abort flow encountered an error', { + error: error instanceof Error ? error.message : String(error), + }) set({ isSendingMessage: false, isAborting: false }) // Only clear active stream for user-initiated aborts, NOT page unload - if (!isPageUnloading) { + if (!isPageUnloading()) { set({ activeStream: null }) writeActiveStreamToStorage(null) } @@ -1235,7 +1422,7 @@ export const useCopilotStore = create()( }, // Tool-call related APIs are stubbed for now - setToolCallState: (toolCall: any, newState: any) => { + setToolCallState: (toolCall: CopilotToolCall, newState: ClientToolCallState | string) => { try { const id: string | undefined = toolCall?.id if (!id) return @@ -1245,7 +1432,7 @@ export const useCopilotStore = create()( // Preserve rejected state from being overridden if ( isRejectedState(current.state) && - (newState === 'success' || newState === (ClientToolCallState as any).success) + (newState === 'success' || newState === ClientToolCallState.success) ) { return } @@ -1264,10 +1451,15 @@ export const useCopilotStore = create()( display: resolveToolDisplay(current.name, norm, id, current.params), } set({ toolCallsById: map }) - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to update tool call state', { + error: error instanceof Error ? error.message : String(error), + toolCallId: toolCall?.id, + }) + } }, - updateToolCallParams: (toolCallId: string, params: Record) => { + updateToolCallParams: (toolCallId: string, params: Record) => { try { if (!toolCallId) return const map = { ...get().toolCallsById } @@ -1280,7 +1472,12 @@ export const useCopilotStore = create()( display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams), } set({ toolCallsById: map }) - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to update tool call params', { + error: error instanceof Error ? error.message : String(error), + toolCallId, + }) + } }, updatePreviewToolCallState: ( toolCallState: 'accepted' | 'rejected' | 'error', @@ -1301,7 +1498,7 @@ export const useCopilotStore = create()( outer: for (let mi = messages.length - 1; mi >= 0; mi--) { const m = messages[mi] if (m.role !== 'assistant' || !m.contentBlocks) continue - const blocks = m.contentBlocks as any[] + const blocks = m.contentBlocks for (let bi = blocks.length - 1; bi >= 0; bi--) { const b = blocks[bi] if (b?.type === 'tool_call') { @@ -1323,7 +1520,7 @@ export const useCopilotStore = create()( const current = toolCallsById[id] if (!current) return // Do not override a rejected tool with success - if (isRejectedState(current.state) && targetState === (ClientToolCallState as any).success) { + if (isRejectedState(current.state) && targetState === ClientToolCallState.success) { return } @@ -1344,15 +1541,14 @@ export const useCopilotStore = create()( const m = messages[mi] if (m.role !== 'assistant' || !m.contentBlocks) continue let changed = false - const blocks = m.contentBlocks.map((b: any) => { + const blocks = m.contentBlocks.map((b) => { if (b.type === 'tool_call' && b.toolCall?.id === id) { changed = true - const prev = b.toolCall ?? 
{} return { ...b, toolCall: { - ...prev, - id, + ...b.toolCall, + id: id!, name: current.name, state: targetState, display: updatedDisplay, @@ -1371,15 +1567,27 @@ export const useCopilotStore = create()( }) try { - fetch('/api/copilot/confirm', { + fetch(COPILOT_CONFIRM_API_PATH, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ toolCallId: id, status: toolCallState, }), - }).catch(() => {}) - } catch {} + }).catch((error) => { + logger.warn('[Copilot] Failed to send tool confirmation', { + error: error instanceof Error ? error.message : String(error), + toolCallId: id, + status: toolCallState, + }) + }) + } catch (error) { + logger.warn('[Copilot] Failed to queue tool confirmation request', { + error: error instanceof Error ? error.message : String(error), + toolCallId: id, + status: toolCallState, + }) + } }, loadMessageCheckpoints: async (chatId: string) => { @@ -1387,16 +1595,19 @@ export const useCopilotStore = create()( if (!workflowId) return set({ isLoadingCheckpoints: true, checkpointError: null }) try { - const response = await fetch(`/api/copilot/checkpoints?chatId=${chatId}`) + const response = await fetch(`${COPILOT_CHECKPOINTS_API_PATH}?chatId=${chatId}`) if (!response.ok) throw new Error(`Failed to load checkpoints: ${response.statusText}`) const data = await response.json() if (data.success && Array.isArray(data.checkpoints)) { - const grouped = data.checkpoints.reduce((acc: Record, cp: any) => { - const key = cp.messageId || '__no_message__' - acc[key] = acc[key] ?? [] - acc[key].push(cp) - return acc - }, {}) + const grouped = (data.checkpoints as CheckpointEntry[]).reduce( + (acc: Record, cp: CheckpointEntry) => { + const key = cp.messageId || '__no_message__' + acc[key] = acc[key] ?? [] + acc[key].push(cp) + return acc + }, + {} + ) set({ messageCheckpoints: grouped, isLoadingCheckpoints: false }) } else { throw new Error('Invalid checkpoints response') @@ -1417,9 +1628,9 @@ export const useCopilotStore = create()( try { const { messageCheckpoints } = get() const checkpointMessageId = Object.entries(messageCheckpoints).find(([, cps]) => - (cps ?? []).some((cp: any) => cp?.id === checkpointId) + (cps ?? []).some((cp) => cp?.id === checkpointId) )?.[0] - const response = await fetch('/api/copilot/checkpoints/revert', { + const response = await fetch(COPILOT_CHECKPOINTS_REVERT_API_PATH, { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ checkpointId }), @@ -1434,7 +1645,11 @@ export const useCopilotStore = create()( // Clear any active diff preview try { useWorkflowDiffStore.getState().clearDiff() - } catch {} + } catch (error) { + logger.warn('[Copilot] Failed to clear diff before checkpoint revert', { + error: error instanceof Error ? error.message : String(error), + }) + } // Apply to main workflow store useWorkflowStore.setState({ @@ -1447,14 +1662,13 @@ export const useCopilotStore = create()( }) // Extract and apply subblock values - const values: Record> = {} - Object.entries(reverted.blocks ?? {}).forEach(([blockId, block]: [string, any]) => { + const values: Record> = {} + Object.entries(reverted.blocks ?? {}).forEach(([blockId, block]) => { + const typedBlock = block as { subBlocks?: Record } values[blockId] = {} - Object.entries((block as any).subBlocks ?? {}).forEach( - ([subId, sub]: [string, any]) => { - values[blockId][subId] = (sub as any)?.value - } - ) + Object.entries(typedBlock.subBlocks ?? 
+            values[blockId][subId] = sub?.value
+          })
         })
         const subState = useSubBlockStore.getState()
         useSubBlockStore.setState({
@@ -1500,7 +1714,7 @@ export const useCopilotStore = create()(
       const startTimeMs = Date.now()
       const expectedStreamId = triggerUserMessageId
-      const context = createStreamingContext(assistantMessageId)
+      const context = createClientStreamingContext(assistantMessageId)
       if (isContinuation) {
         context.suppressContinueOption = true
       }
@@ -1508,7 +1722,7 @@ export const useCopilotStore = create()(
       if (isContinuation) {
         const { messages } = get()
         const existingMessage = messages.find((m) => m.id === assistantMessageId)
-        logger.info('[Copilot] Continuation init', {
+        logger.debug('[Copilot] Continuation init', {
           hasMessage: !!existingMessage,
           contentLength: existingMessage?.content?.length || 0,
           contentPreview: existingMessage?.content?.slice(0, 100) || '',
@@ -1527,10 +1741,12 @@ export const useCopilotStore = create()(
           context.contentBlocks = clonedBlocks
           context.currentTextBlock = findLastTextBlock(clonedBlocks)
         } else if (existingMessage.content) {
-          const textBlock = { type: '', content: '', timestamp: 0, toolCall: null }
-          textBlock.type = TEXT_BLOCK_TYPE
-          textBlock.content = existingMessage.content
-          textBlock.timestamp = Date.now()
+          const textBlock: ClientContentBlock = {
+            type: 'text',
+            content: existingMessage.content,
+            timestamp: Date.now(),
+            toolCall: null,
+          }
           context.contentBlocks = [textBlock]
           context.currentTextBlock = textBlock
           context.accumulatedContent += existingMessage.content
@@ -1541,14 +1757,14 @@ export const useCopilotStore = create()(
       const timeoutId = setTimeout(() => {
         logger.warn('Stream timeout reached, completing response')
         reader.cancel()
-      }, 600000)
+      }, STREAM_TIMEOUT_MS)

       try {
         for await (const data of parseSSEStream(reader, decoder, abortSignal)) {
           if (abortSignal?.aborted) {
             context.wasAborted = true
             const { suppressAbortContinueOption } = get()
-            context.suppressContinueOption = suppressAbortContinueOption === true || isPageUnloading
+            context.suppressContinueOption = suppressAbortContinueOption === true || isPageUnloading()
             if (suppressAbortContinueOption) {
               set({ suppressAbortContinueOption: false })
             }
@@ -1575,13 +1791,13 @@ export const useCopilotStore = create()(
           }

           // Log SSE events for debugging
-          logger.info('[SSE] Received event', {
+          logger.debug('[SSE] Received event', {
             type: data.type,
             hasSubAgent: !!data.subagent,
             subagent: data.subagent,
             dataPreview:
               typeof data.data === 'string'
-                ? data.data.substring(0, 100)
+                ? (data.data as string).substring(0, 100)
                 : JSON.stringify(data.data)?.substring(0, 100),
           })
@@ -1590,15 +1806,15 @@ export const useCopilotStore = create()(
         }

         if (!context.wasAborted && sseHandlers.stream_end) {
-          sseHandlers.stream_end({}, context, get, set)
+          sseHandlers.stream_end({ type: 'done' }, context, get, set)
         }

         stopStreamingUpdates()

-        let sanitizedContentBlocks: any[] = []
+        let sanitizedContentBlocks: ClientContentBlock[] = []
         if (context.contentBlocks && context.contentBlocks.length > 0) {
-          const optimizedBlocks = context.contentBlocks.map((block: any) => ({ ...block }))
-          sanitizedContentBlocks = optimizedBlocks.map((block: any) =>
+          const optimizedBlocks = context.contentBlocks.map((block) => ({ ...block }))
+          sanitizedContentBlocks = optimizedBlocks.map((block) =>
             block.type === TEXT_BLOCK_TYPE && typeof block.content === 'string'
               ? { ...block, content: stripTodoTags(block.content) }
               : block
@@ -1656,7 +1872,7 @@ export const useCopilotStore = create()(
         })

         // Only clear active stream if stream completed normally or user aborted (not page unload)
-        if ((context.streamComplete || context.wasAborted) && !isPageUnloading) {
+        if ((context.streamComplete || context.wasAborted) && !isPageUnloading()) {
           set({ activeStream: null })
           writeActiveStreamToStorage(null)
         }
@@ -1670,7 +1886,7 @@ export const useCopilotStore = create()(
         if (nextInQueue) {
           // Use originalMessageId if available (from edit/resend), otherwise use queue entry id
           const messageIdToUse = nextInQueue.originalMessageId || nextInQueue.id
-          logger.info('[Queue] Processing next queued message', {
+          logger.debug('[Queue] Processing next queued message', {
             id: nextInQueue.id,
             originalMessageId: nextInQueue.originalMessageId,
             messageIdToUse,
@@ -1686,7 +1902,7 @@ export const useCopilotStore = create()(
               contexts: nextInQueue.contexts,
               messageId: messageIdToUse,
             })
-          }, 100)
+          }, QUEUE_PROCESS_DELAY_MS)
         }

         // Persist full message state (including contentBlocks), plan artifact, and config to database
@@ -1697,40 +1913,35 @@ export const useCopilotStore = create()(
           // Debug: Log what we're about to serialize
           const lastMsg = currentMessages[currentMessages.length - 1]
           if (lastMsg?.role === 'assistant') {
-            logger.info('[Stream Done] About to serialize - last message state', {
+            logger.debug('[Stream Done] About to serialize - last message state', {
               id: lastMsg.id,
               contentLength: lastMsg.content?.length || 0,
               hasContentBlocks: !!lastMsg.contentBlocks,
               contentBlockCount: lastMsg.contentBlocks?.length || 0,
-              contentBlockTypes: (lastMsg.contentBlocks as any[])?.map((b) => b?.type) ?? [],
+              contentBlockTypes: lastMsg.contentBlocks?.map((b) => b?.type) ?? [],
             })
           }
-          const dbMessages = serializeMessagesForDB(currentMessages, get().sensitiveCredentialIds)
           const config = {
             mode,
             model: selectedModel,
           }
-          const saveResponse = await fetch('/api/copilot/chat/update-messages', {
-            method: 'POST',
-            headers: { 'Content-Type': 'application/json' },
-            body: JSON.stringify({
-              chatId: currentChat.id,
-              messages: dbMessages,
-              planArtifact: streamingPlanContent || null,
-              config,
-            }),
+          const persisted = await persistMessages({
+            chatId: currentChat.id,
+            messages: currentMessages,
+            sensitiveCredentialIds: get().sensitiveCredentialIds,
+            planArtifact: streamingPlanContent || null,
+            mode,
+            model: selectedModel,
           })
-          if (!saveResponse.ok) {
-            const errorText = await saveResponse.text().catch(() => '')
+          if (!persisted) {
             logger.error('[Stream Done] Failed to save messages to DB', {
-              status: saveResponse.status,
-              error: errorText,
+              chatId: currentChat.id,
             })
           } else {
             logger.info('[Stream Done] Successfully saved messages to DB', {
-              messageCount: dbMessages.length,
+              messageCount: currentMessages.length,
             })
           }
@@ -1747,16 +1958,11 @@ export const useCopilotStore = create()(
           }
         }

-        // Post copilot_stats record (input/output tokens can be null for now)
-        try {
-          // Removed: stats sending now occurs only on accept/reject with minimal payload
-        } catch {}
-
         // Invalidate subscription queries to update usage
         setTimeout(() => {
           const queryClient = getQueryClient()
           queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
-        }, 1000)
+        }, SUBSCRIPTION_INVALIDATE_DELAY_MS)
       } finally {
         clearTimeout(timeoutId)
       }
@@ -1783,7 +1989,11 @@ export const useCopilotStore = create()(
       abortAllInProgressTools(set, get)
       try {
         useWorkflowDiffStore.getState().clearDiff()
-      } catch {}
+      } catch (error) {
+        logger.warn('[Copilot] Failed to clear diff on new chat creation', {
+          error: error instanceof Error ? error.message : String(error),
+        })
+      }

       set({
         currentChat: newChat,
@@ -1807,7 +2017,11 @@ export const useCopilotStore = create()(
       // Clear any diff on cleanup
       try {
         useWorkflowDiffStore.getState().clearDiff()
-      } catch {}
+      } catch (error) {
+        logger.warn('[Copilot] Failed to clear diff on cleanup', {
+          error: error instanceof Error ? error.message : String(error),
+        })
+      }
     },

     reset: () => {
@@ -1845,21 +2059,14 @@ export const useCopilotStore = create()(
       if (currentChat) {
         try {
           const currentMessages = get().messages
-          const dbMessages = serializeMessagesForDB(currentMessages, get().sensitiveCredentialIds)
           const { mode, selectedModel } = get()
-
-          await fetch('/api/copilot/chat/update-messages', {
-            method: 'POST',
-            headers: { 'Content-Type': 'application/json' },
-            body: JSON.stringify({
-              chatId: currentChat.id,
-              messages: dbMessages,
-              planArtifact: null,
-              config: {
-                mode,
-                model: selectedModel,
-              },
-            }),
+          await persistMessages({
+            chatId: currentChat.id,
+            messages: currentMessages,
+            sensitiveCredentialIds: get().sensitiveCredentialIds,
+            planArtifact: null,
+            mode,
+            model: selectedModel,
           })

           // Update local chat object
@@ -1887,21 +2094,14 @@ export const useCopilotStore = create()(
       if (currentChat) {
         try {
           const currentMessages = get().messages
-          const dbMessages = serializeMessagesForDB(currentMessages, get().sensitiveCredentialIds)
           const { mode, selectedModel } = get()
-
-          await fetch('/api/copilot/chat/update-messages', {
-            method: 'POST',
-            headers: { 'Content-Type': 'application/json' },
-            body: JSON.stringify({
-              chatId: currentChat.id,
-              messages: dbMessages,
-              planArtifact: content,
-              config: {
-                mode,
-                model: selectedModel,
-              },
-            }),
+          await persistMessages({
+            chatId: currentChat.id,
+            messages: currentMessages,
+            sensitiveCredentialIds: get().sensitiveCredentialIds,
+            planArtifact: content,
+            mode,
+            model: selectedModel,
           })

           // Update local chat object
@@ -1930,14 +2130,14 @@ export const useCopilotStore = create()(
     loadAutoAllowedTools: async () => {
       try {
-        logger.info('[AutoAllowedTools] Loading from API...')
-        const res = await fetch('/api/copilot/auto-allowed-tools')
-        logger.info('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
+        logger.debug('[AutoAllowedTools] Loading from API...')
+        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
+        logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
         if (res.ok) {
           const data = await res.json()
           const tools = data.autoAllowedTools ?? []
           set({ autoAllowedTools: tools })
-          logger.info('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
+          logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
         } else {
           logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
         }
@@ -1948,18 +2148,18 @@ export const useCopilotStore = create()(
     addAutoAllowedTool: async (toolId: string) => {
       try {
-        logger.info('[AutoAllowedTools] Adding tool...', { toolId })
-        const res = await fetch('/api/copilot/auto-allowed-tools', {
+        logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
+        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
           method: 'POST',
           headers: { 'Content-Type': 'application/json' },
           body: JSON.stringify({ toolId }),
         })
-        logger.info('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
+        logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
         if (res.ok) {
           const data = await res.json()
-          logger.info('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
+          logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
           set({ autoAllowedTools: data.autoAllowedTools ?? [] })
-          logger.info('[AutoAllowedTools] Added tool to store', { toolId })
+          logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
         }
       } catch (err) {
         logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
@@ -1969,7 +2169,7 @@ export const useCopilotStore = create()(
     removeAutoAllowedTool: async (toolId: string) => {
       try {
         const res = await fetch(
-          `/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolId)}`,
+          `${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
           {
             method: 'DELETE',
           }
@@ -1977,7 +2177,7 @@ export const useCopilotStore = create()(
         if (res.ok) {
           const data = await res.json()
           set({ autoAllowedTools: data.autoAllowedTools ?? [] })
-          logger.info('[AutoAllowedTools] Removed tool', { toolId })
+          logger.debug('[AutoAllowedTools] Removed tool', { toolId })
         }
       } catch (err) {
         logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
@@ -1992,7 +2192,7 @@ export const useCopilotStore = create()(
     // Credential masking
     loadSensitiveCredentialIds: async () => {
       try {
-        const res = await fetch('/api/copilot/credentials', {
+        const res = await fetch(COPILOT_CREDENTIALS_API_PATH, {
           credentials: 'include',
         })
         if (!res.ok) {
@@ -2004,7 +2204,7 @@ export const useCopilotStore = create()(
         const json = await res.json()
         // Credentials are at result.oauth.connected.credentials
         const credentials = json?.result?.oauth?.connected?.credentials ?? []
-        logger.info('[loadSensitiveCredentialIds] Response', {
+        logger.debug('[loadSensitiveCredentialIds] Response', {
           hasResult: !!json?.result,
           credentialCount: credentials.length,
         })
@@ -2015,7 +2215,7 @@ export const useCopilotStore = create()(
           }
         }
         set({ sensitiveCredentialIds: ids })
-        logger.info('[loadSensitiveCredentialIds] Loaded credential IDs', {
+        logger.debug('[loadSensitiveCredentialIds] Loaded credential IDs', {
           count: ids.size,
         })
       } catch (err) {
@@ -2058,7 +2258,7 @@ export const useCopilotStore = create()(
     removeFromQueue: (id) => {
       set({ messageQueue: get().messageQueue.filter((m) => m.id !== id) })
-      logger.info('[Queue] Message removed from queue', {
+      logger.debug('[Queue] Message removed from queue', {
         id,
         queueLength: get().messageQueue.length,
       })
@@ -2072,7 +2272,7 @@ export const useCopilotStore = create()(
         queue.splice(index, 1)
         queue.splice(index - 1, 0, item)
         set({ messageQueue: queue })
-        logger.info('[Queue] Message moved up in queue', { id, newIndex: index - 1 })
+        logger.debug('[Queue] Message moved up in queue', { id, newIndex: index - 1 })
       }
     },
diff --git a/apps/sim/stores/panel/copilot/types.ts b/apps/sim/stores/panel/copilot/types.ts
index 07e77ea60..e03c07f9d 100644
--- a/apps/sim/stores/panel/copilot/types.ts
+++ b/apps/sim/stores/panel/copilot/types.ts
@@ -2,6 +2,7 @@ import type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
 export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
+import type { ClientContentBlock } from '@/lib/copilot/client-sse/types'
 import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool'
 import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -21,7 +22,8 @@ export interface CopilotToolCall {
   id: string
   name: string
   state: ClientToolCallState
-  params?: Record
+  params?: Record
+  input?: Record
   display?: ClientToolDisplay
   /** Content streamed from a subagent (e.g., debug agent) */
   subAgentContent?: string
@@ -62,18 +64,7 @@ export interface CopilotMessage {
   timestamp: string
   citations?: { id: number; title: string; url: string; similarity?: number }[]
   toolCalls?: CopilotToolCall[]
-  contentBlocks?: Array<
-    | { type: 'text'; content: string; timestamp: number }
-    | {
-        type: 'thinking'
-        content: string
-        timestamp: number
-        duration?: number
-        startTime?: number
-      }
-    | { type: 'tool_call'; toolCall: CopilotToolCall; timestamp: number }
-    | { type: 'contexts'; contexts: ChatContext[]; timestamp: number }
-  >
+  contentBlocks?: ClientContentBlock[]
   fileAttachments?: MessageFileAttachment[]
   contexts?: ChatContext[]
   errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required'
@@ -110,6 +101,16 @@ import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api'
 export type CopilotChat = ApiCopilotChat
+/**
+ * A checkpoint entry as returned from the checkpoints API.
+ */
+export interface CheckpointEntry {
+  id: string
+  messageId?: string
+  workflowState?: Record
+  createdAt?: string
+}
+
 export interface CopilotState {
   mode: CopilotMode
   selectedModel: CopilotModelId
@@ -122,7 +123,7 @@ export interface CopilotState {
   messages: CopilotMessage[]
   workflowId: string | null
-  messageCheckpoints: Record
+  messageCheckpoints: Record
   messageSnapshots: Record
   isLoading: boolean
@@ -210,11 +211,11 @@ export interface CopilotActions {
     toolCallId?: string
   ) => void
   resumeActiveStream: () => Promise
-  setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void
-  updateToolCallParams: (toolCallId: string, params: Record) => void
+  setToolCallState: (toolCall: CopilotToolCall, newState: ClientToolCallState | string) => void
+  updateToolCallParams: (toolCallId: string, params: Record) => void
   loadMessageCheckpoints: (chatId: string) => Promise
   revertToCheckpoint: (checkpointId: string) => Promise
-  getCheckpointsForMessage: (messageId: string) => any[]
+  getCheckpointsForMessage: (messageId: string) => CheckpointEntry[]
   saveMessageCheckpoint: (messageId: string) => Promise
   clearMessages: () => void
diff --git a/apps/sim/stores/workflow-diff/store.ts b/apps/sim/stores/workflow-diff/store.ts
index 116fa83d7..339465ec5 100644
--- a/apps/sim/stores/workflow-diff/store.ts
+++ b/apps/sim/stores/workflow-diff/store.ts
@@ -1,12 +1,7 @@
 import { createLogger } from '@sim/logger'
-
-declare global {
-  interface Window {
-    __skipDiffRecording?: boolean
-  }
-}
 import { create } from 'zustand'
 import { devtools } from 'zustand/middleware'
+import { COPILOT_STATS_API_PATH } from '@/lib/copilot/constants'
 import { stripWorkflowDiffMarkers, WorkflowDiffEngine } from '@/lib/workflows/diff'
 import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
 import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
@@ -82,7 +77,7 @@ export const useWorkflowDiffStore = create {
+    setProposedChanges: async (proposedState, diffAnalysis, options) => {
       const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
       if (!activeWorkflowId) {
         logger.error('Cannot apply diff without an active workflow')
@@ -212,7 +207,7 @@ export const useWorkflowDiffStore = create {
+    acceptChanges: async (options) => {
       const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
       if (!activeWorkflowId) {
         logger.error('No active workflow ID found when accepting diff')
@@ -307,7 +302,7 @@ export const useWorkflowDiffStore = create {})
+        }).catch((error) => {
+          logger.warn('Failed to send diff-accepted stats', {
+            error: error instanceof Error ? error.message : String(error),
+            messageId: triggerMessageId,
+          })
+        })
       }

       findLatestEditWorkflowToolCallId().then((toolCallId) => {
@@ -347,7 +347,7 @@ export const useWorkflowDiffStore = create {
+    rejectChanges: async (options) => {
       const { baselineWorkflow, baselineWorkflowId, _triggerMessageId, diffAnalysis } = get()

       const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
@@ -389,7 +389,7 @@ export const useWorkflowDiffStore = create {})
+        }).catch((error) => {
+          logger.warn('Failed to send diff-rejected stats', {
+            error: error instanceof Error ? error.message : String(error),
+            messageId: _triggerMessageId,
+          })
+        })
       }

       findLatestEditWorkflowToolCallId().then((toolCallId) => {
@@ -460,11 +465,13 @@ export const useWorkflowDiffStore = create {
           const block = currentBlocks[blockId]
-          return block && (block as any).is_diff !== 'new'
+          const blockDiffState = (block as { is_diff?: string } | undefined)?.is_diff
+          return block && blockDiffState !== 'new'
         }) ||
         diffAnalysis.edited_blocks?.some((blockId) => {
           const block = currentBlocks[blockId]
-          return block && (block as any).is_diff !== 'edited'
+          const blockDiffState = (block as { is_diff?: string } | undefined)?.is_diff
+          return block && blockDiffState !== 'edited'
         })

       if (!needsUpdate) {
@@ -478,11 +485,12 @@ export const useWorkflowDiffStore = create {
         const isNewBlock = diffAnalysis.new_blocks?.includes(blockId)
         const isEditedBlock = diffAnalysis.edited_blocks?.includes(blockId)
+        const blockDiffState = (block as { is_diff?: string } | undefined)?.is_diff

-        if (isNewBlock && (block as any).is_diff !== 'new') {
+        if (isNewBlock && blockDiffState !== 'new') {
           updatedBlocks[blockId] = { ...block, is_diff: 'new' }
           hasChanges = true
-        } else if (isEditedBlock && (block as any).is_diff !== 'edited') {
+        } else if (isEditedBlock && blockDiffState !== 'edited') {
           updatedBlocks[blockId] = { ...block, is_diff: 'edited' }
           // Re-apply field_diffs if available
diff --git a/apps/sim/stores/workflow-diff/types.ts b/apps/sim/stores/workflow-diff/types.ts
index fe40b0842..8c412b97c 100644
--- a/apps/sim/stores/workflow-diff/types.ts
+++ b/apps/sim/stores/workflow-diff/types.ts
@@ -13,12 +13,17 @@ export interface WorkflowDiffState {
   _triggerMessageId?: string | null
 }

+export interface DiffActionOptions {
+  /** Skip recording this operation for undo/redo. Used during undo/redo replay. */
+  skipRecording?: boolean
+}
+
 export interface WorkflowDiffActions {
-  setProposedChanges: (workflowState: WorkflowState, diffAnalysis?: DiffAnalysis) => Promise
+  setProposedChanges: (workflowState: WorkflowState, diffAnalysis?: DiffAnalysis, options?: DiffActionOptions) => Promise
   clearDiff: (options?: { restoreBaseline?: boolean }) => void
   toggleDiffView: () => void
-  acceptChanges: () => Promise
-  rejectChanges: () => Promise
+  acceptChanges: (options?: DiffActionOptions) => Promise
+  rejectChanges: (options?: DiffActionOptions) => Promise
   reapplyDiffMarkers: () => void
   _batchedStateUpdate: (updates: Partial) => void
 }
diff --git a/apps/sim/stores/workflow-diff/utils.ts b/apps/sim/stores/workflow-diff/utils.ts
index 27605a268..b5cdd4996 100644
--- a/apps/sim/stores/workflow-diff/utils.ts
+++ b/apps/sim/stores/workflow-diff/utils.ts
@@ -26,7 +26,7 @@ export function extractSubBlockValues(
   Object.entries(workflowState.blocks || {}).forEach(([blockId, block]) => {
     values[blockId] = {}
     Object.entries(block.subBlocks || {}).forEach(([subBlockId, subBlock]) => {
-      values[blockId][subBlockId] = (subBlock as any)?.value ?? null
+      values[blockId][subBlockId] = subBlock?.value ?? null
     })
   })
   return values
@@ -37,7 +37,7 @@ export function applyWorkflowStateToStores(
   workflowState: WorkflowState,
   options?: { updateLastSaved?: boolean }
 ) {
-  logger.info('[applyWorkflowStateToStores] Applying state', {
+  logger.debug('[applyWorkflowStateToStores] Applying state', {
     workflowId,
     blockCount: Object.keys(workflowState.blocks || {}).length,
     edgeCount: workflowState.edges?.length ?? 0,
   })
   const workflowStore = useWorkflowStore.getState()
   const cloned = cloneWorkflowState(workflowState)
-  logger.info('[applyWorkflowStateToStores] Cloned state edges', {
+  logger.debug('[applyWorkflowStateToStores] Cloned state edges', {
     clonedEdgeCount: cloned.edges?.length ?? 0,
   })
   workflowStore.replaceWorkflowState(cloned, options)
@@ -54,7 +54,8 @@ export function applyWorkflowStateToStores(

   // Verify what's in the store after apply
   const afterState = workflowStore.getWorkflowState()
-  logger.info('[applyWorkflowStateToStores] After apply', {
+  logger.info('[applyWorkflowStateToStores] Applied workflow state to stores', {
+    workflowId,
     afterEdgeCount: afterState.edges?.length ?? 0,
   })
 }
@@ -107,7 +108,7 @@ export async function persistWorkflowStateToServer(
 export async function getLatestUserMessageId(): Promise {
   try {
     const { useCopilotStore } = await import('@/stores/panel/copilot/store')
-    const { messages } = useCopilotStore.getState() as any
+    const { messages } = useCopilotStore.getState()
     if (!Array.isArray(messages) || messages.length === 0) {
       return null
     }
@@ -127,21 +128,19 @@ export async function getLatestUserMessageId(): Promise {
 export async function findLatestEditWorkflowToolCallId(): Promise {
   try {
     const { useCopilotStore } = await import('@/stores/panel/copilot/store')
-    const { messages, toolCallsById } = useCopilotStore.getState() as any
+    const { messages, toolCallsById } = useCopilotStore.getState()

     for (let mi = messages.length - 1; mi >= 0; mi--) {
       const message = messages[mi]
       if (message.role !== 'assistant' || !message.contentBlocks) continue
-      for (const block of message.contentBlocks as any[]) {
+      for (const block of message.contentBlocks) {
         if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
           return block.toolCall?.id
         }
       }
     }

-    const fallback = Object.values(toolCallsById).filter(
-      (call: any) => call.name === 'edit_workflow'
-    ) as any[]
+    const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')

     return fallback.length ? fallback[fallback.length - 1].id : undefined
   } catch (error) {
@@ -150,7 +149,7 @@ export async function findLatestEditWorkflowToolCallId(): Promise
 ) => void) {
   let updateTimer: NodeJS.Timeout | null = null
   const UPDATE_DEBOUNCE_MS = 16
   let pendingUpdates: Partial = {}