Add tools

This commit is contained in:
Siddharth Ganesan
2026-02-06 12:06:09 -08:00
parent 3d5321d9a1
commit 92efd817d2
42 changed files with 632 additions and 190 deletions

View File

@@ -5,10 +5,10 @@ import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { buildConversationHistory } from '@/lib/copilot/chat-context'
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload'
import { generateChatTitle } from '@/lib/copilot/chat-title'
import { getCopilotModel } from '@/lib/copilot/config'
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
@@ -228,7 +228,9 @@ export async function POST(req: NextRequest) {
hasTools: Array.isArray(requestPayload.tools),
toolCount: Array.isArray(requestPayload.tools) ? requestPayload.tools.length : 0,
hasBaseTools: Array.isArray(requestPayload.baseTools),
baseToolCount: Array.isArray(requestPayload.baseTools) ? requestPayload.baseTools.length : 0,
baseToolCount: Array.isArray(requestPayload.baseTools)
? requestPayload.baseTools.length
: 0,
hasCredentials: !!requestPayload.credentials,
})
} catch {}
@@ -370,7 +372,10 @@ export async function POST(req: NextRequest) {
content: nonStreamingResult.content,
toolCalls: nonStreamingResult.toolCalls,
model: selectedModel,
provider: (requestPayload?.provider as Record<string, unknown>)?.provider || env.COPILOT_PROVIDER || 'openai',
provider:
(requestPayload?.provider as Record<string, unknown>)?.provider ||
env.COPILOT_PROVIDER ||
'openai',
}
logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, {

View File

@@ -471,9 +471,9 @@ export class ExecutionEngine {
}
}
private getSerializableExecutionState(
snapshotSeed?: { snapshot: string }
): SerializableExecutionState | undefined {
private getSerializableExecutionState(snapshotSeed?: {
snapshot: string
}): SerializableExecutionState | undefined {
try {
const serializedSnapshot =
snapshotSeed?.snapshot ?? serializePauseSnapshot(this.context, [], this.dag).snapshot

View File

@@ -6,6 +6,7 @@ declare global {
__skipDiffRecording?: boolean
}
}
import type { Edge } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
@@ -914,7 +915,7 @@ export function useUndoRedo() {
// Set flag to skip recording during this operation
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Restore baseline state and broadcast to everyone
if (baselineSnapshot && activeWorkflowId) {
@@ -951,7 +952,7 @@ export function useUndoRedo() {
logger.info('Clearing diff UI state')
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Undid apply-diff operation successfully')
@@ -971,7 +972,7 @@ export function useUndoRedo() {
// Set flag to skip recording during this operation
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Apply the before-accept state (with markers for this user)
useWorkflowStore.getState().replaceWorkflowState(beforeAccept)
@@ -1010,7 +1011,7 @@ export function useUndoRedo() {
diffAnalysis: diffAnalysis,
})
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Undid accept-diff operation - restored diff view')
@@ -1024,7 +1025,7 @@ export function useUndoRedo() {
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Apply the before-reject state (with markers for this user)
useWorkflowStore.getState().replaceWorkflowState(beforeReject)
@@ -1061,7 +1062,7 @@ export function useUndoRedo() {
diffAnalysis: diffAnalysis,
})
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Undid reject-diff operation - restored diff view')
@@ -1532,7 +1533,7 @@ export function useUndoRedo() {
// Set flag to skip recording during this operation
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline)
const diffStore = useWorkflowDiffStore.getState()
@@ -1573,7 +1574,7 @@ export function useUndoRedo() {
diffAnalysis: diffAnalysis,
})
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Redid apply-diff operation')
@@ -1589,7 +1590,7 @@ export function useUndoRedo() {
// Set flag to skip recording during this operation
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Clear diff state FIRST to prevent flash of colors (local UI only)
// Use setState directly to ensure synchronous clearing
@@ -1627,7 +1628,7 @@ export function useUndoRedo() {
operationId: opId,
})
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Redid accept-diff operation - cleared diff view')
@@ -1641,7 +1642,7 @@ export function useUndoRedo() {
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
;window.__skipDiffRecording = true
window.__skipDiffRecording = true
try {
// Clear diff state FIRST to prevent flash of colors (local UI only)
// Use setState directly to ensure synchronous clearing
@@ -1679,7 +1680,7 @@ export function useUndoRedo() {
operationId: opId,
})
} finally {
;window.__skipDiffRecording = false
window.__skipDiffRecording = false
}
logger.info('Redid reject-diff operation - cleared diff view')

View File

@@ -141,7 +141,9 @@ export async function sendStreamingMessage(
resumeFromEventId,
})
} catch (error) {
logger.warn('Failed to log streaming message context preview', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to log streaming message context preview', {
error: error instanceof Error ? error.message : String(error),
})
}
const streamId = request.userMessageId

View File

@@ -44,7 +44,10 @@ export async function processFileAttachments(
const processedFileContents: FileContent[] = []
const requestId = `copilot-${userId}-${Date.now()}`
const processedAttachments = await CopilotFiles.processCopilotAttachments(fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0], requestId)
const processedAttachments = await CopilotFiles.processCopilotAttachments(
fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0],
requestId
)
for (const { buffer, attachment } of processedAttachments) {
const fileContent = createFileContent(buffer, attachment.media_type)

View File

@@ -1,12 +1,12 @@
import { createLogger } from '@sim/logger'
import { env } from '@/lib/core/config/env'
import { processFileAttachments } from '@/lib/copilot/chat-context'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
import type { CopilotProviderConfig } from '@/lib/copilot/types'
import { env } from '@/lib/core/config/env'
import { tools } from '@/tools/registry'
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
import { type FileContent, processFileAttachments } from '@/lib/copilot/chat-context'
const logger = createLogger('CopilotChatPayload')
@@ -35,7 +35,10 @@ interface ToolSchema {
}
interface CredentialsPayload {
oauth: Record<string, { accessToken: string; accountId: string; name: string; expiresAt?: string }>
oauth: Record<
string,
{ accessToken: string; accountId: string; name: string; expiresAt?: string }
>
apiKeys: string[]
metadata?: {
connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }>
@@ -95,9 +98,17 @@ export async function buildCopilotRequestPayload(
}
): Promise<Record<string, unknown>> {
const {
message, workflowId, userId, userMessageId, mode,
conversationHistory = [], contexts, fileAttachments,
commands, chatId, implicitFeedback,
message,
workflowId,
userId,
userMessageId,
mode,
conversationHistory = [],
contexts,
fileAttachments,
commands,
chatId,
implicitFeedback,
} = params
const selectedModel = options.selectedModel
@@ -115,7 +126,10 @@ export async function buildCopilotRequestPayload(
const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [
{ type: 'text', text: msg.content as string },
]
const processedHistoricalAttachments = await processFileAttachments(msgAttachments as BuildPayloadParams['fileAttachments'] ?? [], userId)
const processedHistoricalAttachments = await processFileAttachments(
(msgAttachments as BuildPayloadParams['fileAttachments']) ?? [],
userId
)
for (const fileContent of processedHistoricalAttachments) {
content.push(fileContent)
}

View File

@@ -24,9 +24,7 @@ export function createUserMessage(
...(contexts && contexts.length > 0 && { contexts }),
...(contexts &&
contexts.length > 0 && {
contentBlocks: [
{ type: 'contexts', contexts, timestamp: Date.now() },
],
contentBlocks: [{ type: 'contexts', contexts, timestamp: Date.now() }],
}),
}
}
@@ -125,7 +123,12 @@ export function stripContinueOptionFromBlocks(blocks: ClientContentBlock[]): Cli
export function beginThinkingBlock(context: ClientStreamingContext) {
if (!context.currentThinkingBlock) {
const newBlock: ClientContentBlock = { type: 'thinking', content: '', timestamp: Date.now(), startTime: Date.now() }
const newBlock: ClientContentBlock = {
type: 'thinking',
content: '',
timestamp: Date.now(),
startTime: Date.now(),
}
context.currentThinkingBlock = newBlock
context.contentBlocks.push(newBlock)
}

View File

@@ -1,22 +1,18 @@
import { createLogger } from '@sim/logger'
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
import {
isBackgroundState,
isRejectedState,
isReviewState,
resolveToolDisplay,
} from '@/lib/copilot/store-utils'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import {
appendTextBlock,
beginThinkingBlock,
finalizeThinkingBlock,
} from './content-blocks'
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
import type { ClientContentBlock, ClientStreamingContext } from './types'
const logger = createLogger('CopilotClientSseHandlers')
@@ -92,7 +88,9 @@ export function flushStreamingUpdates(set: StoreSet) {
...msg,
content: '',
contentBlocks:
update.contentBlocks.length > 0 ? createOptimizedContentBlocks(update.contentBlocks) : [],
update.contentBlocks.length > 0
? createOptimizedContentBlocks(update.contentBlocks)
: [],
}
}
return msg
@@ -183,7 +181,12 @@ function appendThinkingContent(context: ClientStreamingContext, text: string) {
if (context.currentThinkingBlock) {
context.currentThinkingBlock.content += cleanedText
} else {
const newBlock: ClientContentBlock = { type: 'thinking', content: cleanedText, timestamp: Date.now(), startTime: Date.now() }
const newBlock: ClientContentBlock = {
type: 'thinking',
content: cleanedText,
timestamp: Date.now(),
startTime: Date.now(),
}
context.currentThinkingBlock = newBlock
context.contentBlocks.push(newBlock)
}
@@ -218,7 +221,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
tool_result: (data, context, get, set) => {
try {
const eventData = asRecord(data?.data)
const toolCallId: string | undefined = data?.toolCallId || (eventData.id as string | undefined)
const toolCallId: string | undefined =
data?.toolCallId || (eventData.id as string | undefined)
const success: boolean | undefined = data?.success
const failedDependency: boolean = data?.failedDependency === true
const resultObj = asRecord(data?.result)
@@ -251,7 +255,9 @@ export const sseHandlers: Record<string, SSEHandler> = {
try {
const result = asRecord(data?.result) || asRecord(eventData.result)
const input = asRecord(current.params || current.input)
const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined
const todoId = (input.id || input.todoId || result.id || result.todoId) as
| string
| undefined
if (todoId) {
get().updatePlanTodoStatus(todoId, 'completed')
}
@@ -270,7 +276,9 @@ export const sseHandlers: Record<string, SSEHandler> = {
try {
const result = asRecord(data?.result) || asRecord(eventData.result)
const input = asRecord(current.params || current.input)
const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined
const todoId = (input.id || input.todoId || result.id || result.todoId) as
| string
| undefined
if (todoId) {
get().updatePlanTodoStatus(todoId, 'executing')
}
@@ -296,11 +304,13 @@ export const sseHandlers: Record<string, SSEHandler> = {
})
if (hasWorkflowState) {
const diffStore = useWorkflowDiffStore.getState()
diffStore.setProposedChanges(resultPayload.workflowState as WorkflowState).catch((err) => {
logger.error('[SSE] Failed to apply edit_workflow diff', {
error: err instanceof Error ? err.message : String(err),
diffStore
.setProposedChanges(resultPayload.workflowState as WorkflowState)
.catch((err) => {
logger.error('[SSE] Failed to apply edit_workflow diff', {
error: err instanceof Error ? err.message : String(err),
})
})
})
}
} catch (err) {
logger.error('[SSE] edit_workflow result handling failed', {
@@ -350,7 +360,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
tool_error: (data, context, get, set) => {
try {
const errorData = asRecord(data?.data)
const toolCallId: string | undefined = data?.toolCallId || (errorData.id as string | undefined)
const toolCallId: string | undefined =
data?.toolCallId || (errorData.id as string | undefined)
const failedDependency: boolean = data?.failedDependency === true
if (!toolCallId) return
const { toolCallsById } = get()

View File

@@ -1,3 +1,3 @@
export { sseHandlers } from './handlers'
export { subAgentSSEHandlers, applySseEvent } from './subagent-handlers'
export type { SSEHandler } from './handlers'
export { sseHandlers } from './handlers'
export { applySseEvent, subAgentSSEHandlers } from './subagent-handlers'

View File

@@ -6,11 +6,11 @@ import {
shouldSkipToolResultEvent,
} from '@/lib/copilot/orchestrator/sse-utils'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
import { type SSEHandler, sseHandlers, updateStreamingMessage } from './handlers'
import type { ClientStreamingContext } from './types'
import { sseHandlers, type SSEHandler, updateStreamingMessage } from './handlers'
const logger = createLogger('CopilotClientSubagentHandlers')
@@ -110,7 +110,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
content: (data, context, get, set) => {
const parentToolCallId = context.subAgentParentToolCallId
const contentStr = typeof data.data === 'string' ? data.data : (data.content || '')
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
logger.info('[SubAgent] content event', {
parentToolCallId,
hasData: !!contentStr,
@@ -159,8 +159,9 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
if (!id || !name) return
const isPartial = toolData.partial === true
let args: Record<string, unknown> | undefined =
(toolData.arguments || toolData.input) as Record<string, unknown> | undefined
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
| Record<string, unknown>
| undefined
if (typeof args === 'string') {
try {

View File

@@ -1,4 +1,8 @@
import type { ChatContext, CopilotToolCall, SubAgentContentBlock } from '@/stores/panel/copilot/types'
import type {
ChatContext,
CopilotToolCall,
SubAgentContentBlock,
} from '@/stores/panel/copilot/types'
/**
* A content block used in copilot messages and during streaming.

View File

@@ -1,9 +1,9 @@
import { createLogger } from '@sim/logger'
import { COPILOT_CHECKPOINTS_API_PATH } from '@/lib/copilot/constants'
import type { CopilotMessage, CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { CopilotMessage, CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
const logger = createLogger('CopilotMessageCheckpoints')

View File

@@ -19,10 +19,7 @@ export function maskCredentialIdsInValue<T>(value: T, credentialIds: Set<string>
if (typeof value === 'object') {
const masked: Record<string, unknown> = {}
for (const key of Object.keys(value as Record<string, unknown>)) {
masked[key] = maskCredentialIdsInValue(
(value as Record<string, unknown>)[key],
credentialIds
)
masked[key] = maskCredentialIdsInValue((value as Record<string, unknown>)[key], credentialIds)
}
return masked as T
}

View File

@@ -1,4 +1,4 @@
export * from './credential-masking'
export * from './serialization'
export * from './checkpoints'
export * from './credential-masking'
export * from './persist'
export * from './serialization'

View File

@@ -89,8 +89,12 @@ export const sseHandlers: Record<string, SSEHandler> = {
},
tool_generating: (event, context) => {
const data = getEventData(event)
const toolCallId = event.toolCallId || (data?.toolCallId as string | undefined) || (data?.id as string | undefined)
const toolName = event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
const toolCallId =
event.toolCallId ||
(data?.toolCallId as string | undefined) ||
(data?.id as string | undefined)
const toolName =
event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
if (!toolCallId || !toolName) return
if (!context.toolCalls.has(toolCallId)) {
context.toolCalls.set(toolCallId, {
@@ -107,7 +111,9 @@ export const sseHandlers: Record<string, SSEHandler> = {
const toolName = (toolData.name as string | undefined) || event.toolName
if (!toolCallId || !toolName) return
const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record<string, unknown> | undefined
const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as
| Record<string, unknown>
| undefined
const isPartial = toolData.partial === true
const existing = context.toolCalls.get(toolCallId)
@@ -164,7 +170,11 @@ export const sseHandlers: Record<string, SSEHandler> = {
const isInteractive = options.interactive === true
if (isInterruptTool && isInteractive) {
const decision = await waitForToolDecision(toolCallId, options.timeout || STREAM_TIMEOUT_MS, options.abortSignal)
const decision = await waitForToolDecision(
toolCallId,
options.timeout || STREAM_TIMEOUT_MS,
options.abortSignal
)
if (decision?.status === 'accepted' || decision?.status === 'success') {
await executeToolAndReport(toolCallId, context, execContext, options)
return
@@ -308,7 +318,9 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
const toolName = (toolData.name as string | undefined) || event.toolName
if (!toolCallId || !toolName) return
const isPartial = toolData.partial === true
const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record<string, unknown> | undefined
const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as
| Record<string, unknown>
| undefined
const existing = context.toolCalls.get(toolCallId)
// Ignore late/duplicate tool_call events once we already have a result.

View File

@@ -58,7 +58,9 @@ export const getEventData = (event: SSEEvent): EventDataObject => {
function getToolCallIdFromEvent(event: SSEEvent): string | undefined {
const data = getEventData(event)
return event.toolCallId || (data?.id as string | undefined) || (data?.toolCallId as string | undefined)
return (
event.toolCallId || (data?.id as string | undefined) || (data?.toolCallId as string | undefined)
)
}
/** Normalizes SSE events so tool metadata is available at the top level. */
@@ -66,8 +68,10 @@ export function normalizeSseEvent(event: SSEEvent): SSEEvent {
if (!event) return event
const data = getEventData(event)
if (!data) return event
const toolCallId = event.toolCallId || (data.id as string | undefined) || (data.toolCallId as string | undefined)
const toolName = event.toolName || (data.name as string | undefined) || (data.toolName as string | undefined)
const toolCallId =
event.toolCallId || (data.id as string | undefined) || (data.toolCallId as string | undefined)
const toolName =
event.toolName || (data.name as string | undefined) || (data.toolName as string | undefined)
const success = event.success ?? (data.success as boolean | undefined)
const result = event.result ?? data.result
const normalizedData = typeof event.data === 'string' ? data : event.data

View File

@@ -29,7 +29,7 @@ export interface StreamLoopOptions extends OrchestratorOptions {
* Called for each normalized event BEFORE standard handler dispatch.
* Return true to skip the default handler for this event.
*/
onBeforeDispatch?: (event: SSEEvent, context: StreamingContext) => boolean | void
onBeforeDispatch?: (event: SSEEvent, context: StreamingContext) => boolean | undefined
}
/**
@@ -78,7 +78,9 @@ export async function runStreamLoop(
if (!response.ok) {
const errorText = await response.text().catch(() => '')
throw new Error(`Copilot backend error (${response.status}): ${errorText || response.statusText}`)
throw new Error(
`Copilot backend error (${response.status}): ${errorText || response.statusText}`
)
}
if (!response.body) {

View File

@@ -49,6 +49,7 @@ import type {
RunWorkflowUntilBlockParams,
SetGlobalWorkflowVariablesParams,
} from './param-types'
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
import {
executeCreateFolder,
executeCreateWorkflow,
@@ -116,13 +117,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
get_deployed_workflow_state: (p, c) =>
executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
get_platform_actions: () =>
Promise.resolve({
success: true,
output: { content: PLATFORM_ACTIONS_CONTENT },
}),
set_global_workflow_variables: (p, c) =>
executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
redeploy: (_p, c) => executeRedeploy(c),
check_deployment_status: (p, c) => executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
check_deployment_status: (p, c) =>
executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
list_workspace_mcp_servers: (p, c) =>
executeListWorkspaceMcpServers(p as ListWorkspaceMcpServersParams, c),
create_workspace_mcp_server: (p, c) =>

View File

@@ -0,0 +1,117 @@
/**
 * Static markdown content for the `get_platform_actions` copilot tool.
 *
 * The tool handler returns this string verbatim as its output content
 * (no templating or generation happens at runtime), so the keyboard
 * shortcuts and UI steps listed here must be kept in sync with the
 * actual platform bindings by hand.
 *
 * NOTE(review): **Mod** is defined below as Cmd on macOS / Ctrl on
 * Windows and Linux — presumably matching the app's shortcut handling;
 * verify against the key-binding layer when editing.
 */
export const PLATFORM_ACTIONS_CONTENT = `# Sim Platform Quick Reference & Keyboard Shortcuts
## Keyboard Shortcuts
**Mod** = Cmd (macOS) / Ctrl (Windows/Linux). Shortcuts work when canvas is focused.
### Workflow Actions
| Shortcut | Action |
|----------|--------|
| Mod+Enter | Run workflow (or cancel if running) |
| Mod+Z | Undo |
| Mod+Shift+Z | Redo |
| Mod+C | Copy selected blocks |
| Mod+V | Paste blocks |
| Delete/Backspace | Delete selected blocks or edges |
| Shift+L | Auto-layout canvas |
| Mod+Shift+F | Fit to view |
| Mod+Shift+Enter | Accept Copilot changes |
### Panel Navigation
| Shortcut | Action |
|----------|--------|
| C | Focus Copilot tab |
| T | Focus Toolbar tab |
| E | Focus Editor tab |
| Mod+F | Focus Toolbar search |
### Global Navigation
| Shortcut | Action |
|----------|--------|
| Mod+K | Open search |
| Mod+Shift+A | Add new agent workflow |
| Mod+Y | Go to templates |
| Mod+L | Go to logs |
### Utility
| Shortcut | Action |
|----------|--------|
| Mod+D | Clear terminal console |
| Mod+E | Clear notifications |
### Mouse Controls
| Action | Control |
|--------|---------|
| Pan/move canvas | Left-drag on empty space, scroll, or trackpad |
| Select multiple blocks | Right-drag to draw selection box |
| Drag block | Left-drag on block header |
| Add to selection | Mod+Click on blocks |
## Quick Reference — Workspaces
| Action | How |
|--------|-----|
| Create workspace | Click workspace dropdown → New Workspace |
| Switch workspaces | Click workspace dropdown → Select workspace |
| Invite team members | Sidebar → Invite |
| Rename/Duplicate/Export/Delete workspace | Right-click workspace → action |
## Quick Reference — Workflows
| Action | How |
|--------|-----|
| Create workflow | Click + button in sidebar |
| Reorder/move workflows | Drag workflow up/down or onto a folder |
| Import workflow | Click import button in sidebar → Select file |
| Multi-select workflows | Mod+Click or Shift+Click workflows in sidebar |
| Open in new tab | Right-click workflow → Open in New Tab |
| Rename/Color/Duplicate/Export/Delete | Right-click workflow → action |
## Quick Reference — Blocks
| Action | How |
|--------|-----|
| Add a block | Drag from Toolbar panel, or right-click canvas → Add Block |
| Multi-select blocks | Mod+Click additional blocks, or shift-drag selection box |
| Copy/Paste blocks | Mod+C / Mod+V |
| Duplicate/Delete blocks | Right-click → action |
| Rename a block | Click block name in header |
| Enable/Disable block | Right-click → Enable/Disable |
| Lock/Unlock block | Hover block → Click lock icon (Admin only) |
| Toggle handle orientation | Right-click → Toggle Handles |
| Configure a block | Select block → use Editor panel on right |
## Quick Reference — Connections
| Action | How |
|--------|-----|
| Create connection | Drag from output handle to input handle |
| Delete connection | Click edge to select → Delete key |
| Use output in another block | Drag connection tag into input field |
## Quick Reference — Running & Testing
| Action | How |
|--------|-----|
| Run workflow | Click Run Workflow button or Mod+Enter |
| Stop workflow | Click Stop button or Mod+Enter while running |
| Test with chat | Use Chat panel on the right side |
| Run from block | Hover block → Click play button, or right-click → Run from block |
| Run until block | Right-click block → Run until block |
| View execution logs | Open terminal panel at bottom, or Mod+L |
| Filter/Search/Copy/Clear logs | Terminal panel controls |
## Quick Reference — Deployment
| Action | How |
|--------|-----|
| Deploy workflow | Click Deploy button in panel |
| Update deployment | Click Update when changes are detected |
| Revert deployment | Previous versions in Deploy tab → Promote to live |
| Copy API endpoint | Deploy tab → API → Copy API cURL |
## Quick Reference — Variables
| Action | How |
|--------|-----|
| Add/Edit/Delete workflow variable | Panel → Variables → Add Variable |
| Add environment variable | Settings → Environment Variables → Add |
| Reference workflow variable | Use <blockName.itemName> syntax |
| Reference environment variable | Use {{ENV_VAR}} syntax |
`

View File

@@ -1,20 +1,20 @@
import crypto from 'crypto'
import { nanoid } from 'nanoid'
import { createLogger } from '@sim/logger'
import { db } from '@sim/db'
import { apiKey, workflow, workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, max } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { nanoid } from 'nanoid'
import { createApiKey } from '@/lib/api-key/auth'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { ensureWorkflowAccess, ensureWorkspaceAccess, getDefaultWorkspaceId } from '../access'
import {
getExecutionState,
getLatestExecutionState,
} from '@/lib/workflows/executor/execution-state'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { ensureWorkflowAccess, ensureWorkspaceAccess, getDefaultWorkspaceId } from '../access'
import type {
CreateFolderParams,
CreateWorkflowParams,
@@ -243,7 +243,9 @@ export async function executeSetGlobalWorkflowVariables(
if (type === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed))
return parsed
} catch (error) {
logger.warn('Failed to parse JSON value for variable coercion', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to parse JSON value for variable coercion', {
error: error instanceof Error ? error.message : String(error),
})
}
return value
}
@@ -284,9 +286,7 @@ export async function executeSetGlobalWorkflowVariables(
}
}
const nextVarsRecord = Object.fromEntries(
Object.values(byName).map((v) => [String(v.id), v])
)
const nextVarsRecord = Object.fromEntries(Object.values(byName).map((v) => [String(v.id), v]))
await db
.update(workflow)

View File

@@ -15,8 +15,8 @@ import {
loadWorkflowFromNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import { normalizeName } from '@/executor/constants'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
ensureWorkflowAccess,
ensureWorkspaceAccess,

View File

@@ -45,25 +45,16 @@ export async function processContexts(
)
}
if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
return await processKnowledgeFromDb(
ctx.knowledgeId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processKnowledgeFromDb(ctx.knowledgeId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
return await processBlockMetadata(ctx.blockIds[0], ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'templates' && ctx.templateId) {
return await processTemplateFromDb(
ctx.templateId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processTemplateFromDb(ctx.templateId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'logs' && ctx.executionId) {
return await processExecutionLogFromDb(
ctx.executionId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processExecutionLogFromDb(ctx.executionId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
@@ -100,10 +91,7 @@ export async function processContextsServer(
)
}
if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
return await processKnowledgeFromDb(
ctx.knowledgeId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processKnowledgeFromDb(ctx.knowledgeId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
return await processBlockMetadata(
@@ -113,16 +101,10 @@ export async function processContextsServer(
)
}
if (ctx.kind === 'templates' && ctx.templateId) {
return await processTemplateFromDb(
ctx.templateId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processTemplateFromDb(ctx.templateId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'logs' && ctx.executionId) {
return await processExecutionLogFromDb(
ctx.executionId,
ctx.label ? `@${ctx.label}` : '@'
)
return await processExecutionLogFromDb(ctx.executionId, ctx.label ? `@${ctx.label}` : '@')
}
if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)

View File

@@ -86,10 +86,7 @@ export function isTerminalState(state: string): boolean {
)
}
export function abortAllInProgressTools(
set: StoreSet,
get: () => CopilotStore
) {
export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore) {
try {
const { toolCallsById, messages } = get()
const updatedMap = { ...toolCallsById }
@@ -166,10 +163,7 @@ export function cleanupActiveState(
set: (partial: Record<string, unknown>) => void,
get: () => Record<string, unknown>
): void {
abortAllInProgressTools(
set as unknown as StoreSet,
get as unknown as () => CopilotStore
)
abortAllInProgressTools(set as unknown as StoreSet, get as unknown as () => CopilotStore)
try {
const { useWorkflowDiffStore } = require('@/stores/workflow-diff/store') as {
useWorkflowDiffStore: {

View File

@@ -183,7 +183,8 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
},
parentId: {
type: 'string',
description: 'Target parent folder ID. Omit or pass empty string to move to workspace root.',
description:
'Target parent folder ID. Omit or pass empty string to move to workspace root.',
},
},
required: ['folderId'],
@@ -203,7 +204,8 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
},
workflow_input: {
type: 'object',
description: 'JSON object with input values. Keys should match the workflow start block input field names.',
description:
'JSON object with input values. Keys should match the workflow start block input field names.',
},
useDeployedState: {
type: 'boolean',
@@ -227,7 +229,8 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
},
stopAfterBlockId: {
type: 'string',
description: 'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
description:
'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
},
workflow_input: {
type: 'object',
@@ -259,7 +262,8 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
},
executionId: {
type: 'string',
description: 'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
description:
'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
},
workflow_input: {
type: 'object',
@@ -291,7 +295,8 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
},
executionId: {
type: 'string',
description: 'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
description:
'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
},
workflow_input: {
type: 'object',
@@ -331,11 +336,12 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
properties: {
name: {
type: 'string',
description: 'A descriptive name for the API key (e.g., "production-key", "dev-testing").',
description:
'A descriptive name for the API key (e.g., "production-key", "dev-testing").',
},
workspaceId: {
type: 'string',
description: 'Optional workspace ID. Defaults to user\'s default workspace.',
description: "Optional workspace ID. Defaults to user's default workspace.",
},
},
required: ['name'],
@@ -495,7 +501,15 @@ After copilot_edit completes, you can test immediately with copilot_test, or dep
DEPLOYMENT TYPES:
- "deploy as api" - REST API endpoint for programmatic access
- "deploy as chat" - Managed chat UI with auth options
- "deploy as mcp" - Expose as MCP tool for AI agents`,
- "deploy as mcp" - Expose as MCP tool on an MCP server for AI agents to call
MCP DEPLOYMENT FLOW:
The deploy subagent will automatically: list available MCP servers → create one if needed → deploy the workflow as an MCP tool to that server. You can specify server name, tool name, and tool description.
ALSO CAN:
- Get the deployed (production) state to compare with draft
- Generate workspace API keys for calling deployed workflows
- List and create MCP servers in the workspace`,
inputSchema: {
type: 'object',
properties: {
@@ -515,7 +529,13 @@ DEPLOYMENT TYPES:
{
name: 'copilot_test',
agentId: 'test',
description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.`,
description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.
Supports full and partial execution:
- Full run with test inputs
- Stop after a specific block (run_workflow_until_block)
- Run a single block in isolation (run_block)
- Resume from a specific block (run_from_block)`,
inputSchema: {
type: 'object',
properties: {
@@ -590,7 +610,7 @@ DEPLOYMENT TYPES:
name: 'copilot_info',
agentId: 'info',
description:
'Inspect a workflow\'s blocks, connections, outputs, variables, and metadata. Use for questions about the Sim platform itself — how blocks work, what integrations are available, platform concepts, etc. Always provide workflowId to scope results to a specific workflow.',
"Inspect a workflow's blocks, connections, outputs, variables, and metadata. Use for questions about the Sim platform itself — how blocks work, what integrations are available, platform concepts, etc. Always provide workflowId to scope results to a specific workflow.",
inputSchema: {
type: 'object',
properties: {
@@ -644,4 +664,18 @@ DEPLOYMENT TYPES:
required: ['request'],
},
},
{
name: 'copilot_platform',
agentId: 'tour',
description:
'Get help with Sim platform navigation, keyboard shortcuts, and UI actions. Use when the user asks "how do I..." about the Sim editor, wants keyboard shortcuts, or needs to know what actions are available in the UI.',
inputSchema: {
type: 'object',
properties: {
request: { type: 'string' },
context: { type: 'object' },
},
required: ['request'],
},
},
]

View File

@@ -1,9 +1,6 @@
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
GetBlocksAndToolsInput,
GetBlocksAndToolsResult,
} from '@/lib/copilot/tools/shared/schemas'
import { GetBlocksAndToolsInput, GetBlocksAndToolsResult } from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'

View File

@@ -2,10 +2,7 @@ import { existsSync, readFileSync } from 'fs'
import { join } from 'path'
import { createLogger } from '@sim/logger'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
GetBlocksMetadataInput,
GetBlocksMetadataResult,
} from '@/lib/copilot/tools/shared/schemas'
import { GetBlocksMetadataInput, GetBlocksMetadataResult } from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import { AuthMode, type BlockConfig, isHiddenFromDisplay } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
@@ -291,7 +288,9 @@ export const getBlocksMetadataServerTool: BaseServerTool<
metadata.yamlDocumentation = readFileSync(docPath, 'utf-8')
}
} catch (error) {
logger.warn('Failed to read YAML documentation file', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to read YAML documentation file', {
error: error instanceof Error ? error.message : String(error),
})
}
if (metadata) {
@@ -957,7 +956,9 @@ function resolveToolIdForOperation(blockConfig: BlockConfig, opId: string): stri
}
} catch (error) {
const toolLogger = createLogger('GetBlocksMetadataServerTool')
toolLogger.warn('Failed to resolve tool ID for operation', { error: error instanceof Error ? error.message : String(error) })
toolLogger.warn('Failed to resolve tool ID for operation', {
error: error instanceof Error ? error.message : String(error),
})
}
return undefined
}

View File

@@ -7,6 +7,14 @@ import {
getKnowledgeBaseById,
getKnowledgeBases,
} from '@/lib/knowledge/service'
import {
createTagDefinition,
deleteTagDefinition,
getDocumentTagDefinitions,
getNextAvailableSlot,
getTagUsageStats,
updateTagDefinition,
} from '@/lib/knowledge/tags/service'
import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/search/utils'
const logger = createLogger('KnowledgeBaseServerTool')
@@ -213,10 +221,177 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
}
}
case 'list_tags': {
if (!args.knowledgeBaseId) {
return {
success: false,
message: 'Knowledge base ID is required for list_tags operation',
}
}
const tagDefinitions = await getDocumentTagDefinitions(args.knowledgeBaseId)
logger.info('Tag definitions listed via copilot', {
knowledgeBaseId: args.knowledgeBaseId,
count: tagDefinitions.length,
userId: context.userId,
})
return {
success: true,
message: `Found ${tagDefinitions.length} tag definition(s)`,
data: tagDefinitions.map((td) => ({
id: td.id,
tagSlot: td.tagSlot,
displayName: td.displayName,
fieldType: td.fieldType,
createdAt: td.createdAt,
})),
}
}
case 'create_tag': {
if (!args.knowledgeBaseId) {
return {
success: false,
message: 'Knowledge base ID is required for create_tag operation',
}
}
if (!args.tagDisplayName) {
return {
success: false,
message: 'tagDisplayName is required for create_tag operation',
}
}
const fieldType = args.tagFieldType || 'text'
const tagSlot = await getNextAvailableSlot(args.knowledgeBaseId, fieldType)
if (!tagSlot) {
return {
success: false,
message: `No available slots for field type "${fieldType}". Maximum tags of this type reached.`,
}
}
const requestId = crypto.randomUUID().slice(0, 8)
const newTag = await createTagDefinition(
{
knowledgeBaseId: args.knowledgeBaseId,
tagSlot,
displayName: args.tagDisplayName,
fieldType,
},
requestId
)
logger.info('Tag definition created via copilot', {
knowledgeBaseId: args.knowledgeBaseId,
tagId: newTag.id,
displayName: newTag.displayName,
userId: context.userId,
})
return {
success: true,
message: `Tag "${newTag.displayName}" created successfully`,
data: {
id: newTag.id,
tagSlot: newTag.tagSlot,
displayName: newTag.displayName,
fieldType: newTag.fieldType,
},
}
}
case 'update_tag': {
if (!args.tagDefinitionId) {
return {
success: false,
message: 'tagDefinitionId is required for update_tag operation',
}
}
const updateData: { displayName?: string; fieldType?: string } = {}
if (args.tagDisplayName) updateData.displayName = args.tagDisplayName
if (args.tagFieldType) updateData.fieldType = args.tagFieldType
if (!updateData.displayName && !updateData.fieldType) {
return {
success: false,
message: 'At least one of tagDisplayName or tagFieldType is required for update_tag',
}
}
const requestId = crypto.randomUUID().slice(0, 8)
const updatedTag = await updateTagDefinition(args.tagDefinitionId, updateData, requestId)
logger.info('Tag definition updated via copilot', {
tagId: args.tagDefinitionId,
userId: context.userId,
})
return {
success: true,
message: `Tag "${updatedTag.displayName}" updated successfully`,
data: {
id: updatedTag.id,
tagSlot: updatedTag.tagSlot,
displayName: updatedTag.displayName,
fieldType: updatedTag.fieldType,
},
}
}
case 'delete_tag': {
if (!args.tagDefinitionId) {
return {
success: false,
message: 'tagDefinitionId is required for delete_tag operation',
}
}
const requestId = crypto.randomUUID().slice(0, 8)
const deleted = await deleteTagDefinition(args.tagDefinitionId, requestId)
logger.info('Tag definition deleted via copilot', {
tagId: args.tagDefinitionId,
tagSlot: deleted.tagSlot,
displayName: deleted.displayName,
userId: context.userId,
})
return {
success: true,
message: `Tag "${deleted.displayName}" deleted successfully. All document/chunk references cleared.`,
data: {
tagSlot: deleted.tagSlot,
displayName: deleted.displayName,
},
}
}
case 'get_tag_usage': {
if (!args.knowledgeBaseId) {
return {
success: false,
message: 'Knowledge base ID is required for get_tag_usage operation',
}
}
const requestId = crypto.randomUUID().slice(0, 8)
const stats = await getTagUsageStats(args.knowledgeBaseId, requestId)
return {
success: true,
message: `Retrieved usage stats for ${stats.length} tag(s)`,
data: stats,
}
}
default:
return {
success: false,
message: `Unknown operation: ${operation}. Supported operations: create, list, get, query`,
message: `Unknown operation: ${operation}. Supported operations: create, list, get, query, list_tags, create_tag, update_tag, delete_tag, get_tag_usage`,
}
}
} catch (error) {

View File

@@ -49,7 +49,17 @@ export const searchOnlineServerTool: BaseServerTool<OnlineSearchParams, SearchRe
apiKey: env.EXA_API_KEY ?? '',
})
const output = exaResult.output as { results?: Array<{ title?: string; url?: string; text?: string; summary?: string; publishedDate?: string }> } | undefined
const output = exaResult.output as
| {
results?: Array<{
title?: string
url?: string
text?: string
summary?: string
publishedDate?: string
}>
}
| undefined
const exaResults = output?.results ?? []
if (exaResult.success && exaResults.length > 0) {

View File

@@ -90,7 +90,9 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
const decoded = jwtDecode<{ email?: string; name?: string }>(acc.idToken)
displayName = decoded.email || decoded.name || ''
} catch (error) {
logger.warn('Failed to decode JWT id token', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to decode JWT id token', {
error: error instanceof Error ? error.message : String(error),
})
}
}
if (!displayName && baseProvider === 'github') displayName = `${acc.accountId} (GitHub)`
@@ -110,7 +112,9 @@ export const getCredentialsServerTool: BaseServerTool<GetCredentialsParams, any>
)
accessToken = refreshedToken || accessToken
} catch (error) {
logger.warn('Failed to refresh OAuth access token', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to refresh OAuth access token', {
error: error instanceof Error ? error.message : String(error),
})
}
connectedCredentials.push({
id: acc.id,

View File

@@ -7,7 +7,7 @@ import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import type { EditWorkflowOperation, SkippedItem, ValidationError } from './types'
import { UUID_REGEX, logSkippedItem } from './types'
import { logSkippedItem, UUID_REGEX } from './types'
import {
validateInputsForBlock,
validateSourceHandleForBlock,

View File

@@ -238,8 +238,8 @@ export function applyOperationsToWorkflowState(
totalEdges: (modifiedState as any).edges?.length,
})
}
// Regenerate loops and parallels after modifications
;(modifiedState as any).loops = generateLoopBlocks((modifiedState as any).blocks)
;(modifiedState as any).parallels = generateParallelBlocks((modifiedState as any).blocks)

View File

@@ -3,7 +3,6 @@ import { workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { applyAutoLayout } from '@/lib/workflows/autolayout'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import {
@@ -11,6 +10,7 @@ import {
saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { applyOperationsToWorkflowState } from './engine'
import type { EditWorkflowParams, ValidationError } from './types'
@@ -214,7 +214,8 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
: undefined
// Format skipped items for LLM feedback
const skippedMessages = skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined
const skippedMessages =
skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined
// Persist the workflow state to the database
const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@sim/logger'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getBlock } from '@/blocks/registry'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { RESERVED_BLOCK_NAMES, normalizeName } from '@/executor/constants'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import {
addConnectionsAsEdges,
@@ -242,7 +242,11 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
const editBlockConfig = getBlock(block.type)
if (editBlockConfig) {
updateCanonicalModesForInputs(block, Object.keys(validationResult.validInputs), editBlockConfig)
updateCanonicalModesForInputs(
block,
Object.keys(validationResult.validInputs),
editBlockConfig
)
}
}
@@ -341,7 +345,8 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
// Remove edges to/from removed children
modifiedState.edges = modifiedState.edges.filter(
(edge: any) => !existingChildren.includes(edge.source) && !existingChildren.includes(edge.target)
(edge: any) =>
!existingChildren.includes(edge.source) && !existingChildren.includes(edge.target)
)
// Add new nested blocks
@@ -428,7 +433,8 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
block.data.parallelType = params.inputs.parallelType
}
}
const effectiveParallelType = params.inputs?.parallelType ?? block.data.parallelType ?? 'count'
const effectiveParallelType =
params.inputs?.parallelType ?? block.data.parallelType ?? 'count'
// count only valid for 'count' parallelType
if (params.inputs?.count && effectiveParallelType === 'count') {
block.data.count = params.inputs.count
@@ -489,14 +495,19 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
params.removeEdges.forEach(({ targetBlockId, sourceHandle = 'source' }) => {
modifiedState.edges = modifiedState.edges.filter(
(edge: any) =>
!(edge.source === block_id && edge.target === targetBlockId && edge.sourceHandle === sourceHandle)
!(
edge.source === block_id &&
edge.target === targetBlockId &&
edge.sourceHandle === sourceHandle
)
)
})
}
}
export function handleAddOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } = ctx
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } =
ctx
const { block_id, params } = op
const addNormalizedName = params?.name ? normalizeName(params.name) : ''
@@ -522,7 +533,11 @@ export function handleAddOperation(op: EditWorkflowOperation, ctx: OperationCont
return
}
const conflictingBlock = findBlockWithDuplicateNormalizedName(modifiedState.blocks, params.name, block_id)
const conflictingBlock = findBlockWithDuplicateNormalizedName(
modifiedState.blocks,
params.name,
block_id
)
if (conflictingBlock) {
logSkippedItem(skippedItems, {
@@ -580,7 +595,10 @@ export function handleAddOperation(op: EditWorkflowOperation, ctx: OperationCont
}
// Check single-instance block constraints (e.g., Response block)
const singleInstanceIssue = TriggerUtils.getSingleInstanceBlockIssue(modifiedState.blocks, params.type)
const singleInstanceIssue = TriggerUtils.getSingleInstanceBlockIssue(
modifiedState.blocks,
params.type
)
if (singleInstanceIssue) {
logSkippedItem(skippedItems, {
type: 'duplicate_single_instance_block',
@@ -614,9 +632,11 @@ export function handleAddOperation(op: EditWorkflowOperation, ctx: OperationCont
...newBlock.data,
loopType,
// Only include type-appropriate fields
...(loopType === 'forEach' && params.inputs?.collection && { collection: params.inputs.collection }),
...(loopType === 'forEach' &&
params.inputs?.collection && { collection: params.inputs.collection }),
...(loopType === 'for' && params.inputs?.iterations && { count: params.inputs.iterations }),
...(loopType === 'while' && params.inputs?.condition && { whileCondition: params.inputs.condition }),
...(loopType === 'while' &&
params.inputs?.condition && { whileCondition: params.inputs.condition }),
...(loopType === 'doWhile' &&
params.inputs?.condition && { doWhileCondition: params.inputs.condition }),
}
@@ -717,7 +737,8 @@ export function handleInsertIntoSubflowOperation(
op: EditWorkflowOperation,
ctx: OperationContext
): void {
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } = ctx
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } =
ctx
const { block_id, params } = op
const subflowId = params?.subflowId

View File

@@ -90,7 +90,9 @@ function normalizeErrorMessage(errorValue: unknown): string | undefined {
try {
return JSON.stringify(errorValue)
} catch (error) {
logger.warn('Failed to stringify error value', { error: error instanceof Error ? error.message : String(error) })
logger.warn('Failed to stringify error value', {
error: error instanceof Error ? error.message : String(error),
})
}
}
try {

View File

@@ -72,7 +72,17 @@ export type GetBlockConfigResultType = z.infer<typeof GetBlockConfigResult>
// knowledge_base - shared schema used by client tool, server tool, and registry
export const KnowledgeBaseArgsSchema = z.object({
operation: z.enum(['create', 'list', 'get', 'query']),
operation: z.enum([
'create',
'list',
'get',
'query',
'list_tags',
'create_tag',
'update_tag',
'delete_tag',
'get_tag_usage',
]),
args: z
.object({
/** Name of the knowledge base (required for create) */
@@ -81,7 +91,7 @@ export const KnowledgeBaseArgsSchema = z.object({
description: z.string().optional(),
/** Workspace ID to associate with (required for create, optional for list) */
workspaceId: z.string().optional(),
/** Knowledge base ID (required for get, query) */
/** Knowledge base ID (required for get, query, list_tags, create_tag, get_tag_usage) */
knowledgeBaseId: z.string().optional(),
/** Search query text (required for query) */
query: z.string().optional(),
@@ -95,6 +105,12 @@ export const KnowledgeBaseArgsSchema = z.object({
overlap: z.number().min(0).max(500).default(200),
})
.optional(),
/** Tag definition ID (required for update_tag, delete_tag) */
tagDefinitionId: z.string().optional(),
/** Tag display name (required for create_tag, optional for update_tag) */
tagDisplayName: z.string().optional(),
/** Tag field type: text, number, date, boolean (optional for create_tag, defaults to text) */
tagFieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(),
})
.optional(),
})

View File

@@ -21,7 +21,6 @@ import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { redactApiKeys } from '@/lib/core/security/redaction'
import { filterForDisplay } from '@/lib/core/utils/display-filters'
import type { SerializableExecutionState } from '@/executor/execution/types'
import { emitWorkflowExecutionCompleted } from '@/lib/logs/events'
import { snapshotService } from '@/lib/logs/execution/snapshot/service'
import type {
@@ -35,6 +34,7 @@ import type {
WorkflowState,
} from '@/lib/logs/types'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import type { SerializableExecutionState } from '@/executor/execution/types'
export interface ToolCall {
name: string

View File

@@ -3,7 +3,6 @@ import { workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, sql } from 'drizzle-orm'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import type { SerializableExecutionState } from '@/executor/execution/types'
import { executionLogger } from '@/lib/logs/execution/logger'
import {
calculateCostSummary,
@@ -18,6 +17,7 @@ import type {
TraceSpan,
WorkflowState,
} from '@/lib/logs/types'
import type { SerializableExecutionState } from '@/executor/execution/types'
const logger = createLogger('LoggingSession')

View File

@@ -1,6 +1,6 @@
import type { Edge } from 'reactflow'
import type { BlockLog, NormalizedBlockOutput } from '@/executor/types'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { BlockLog, NormalizedBlockOutput } from '@/executor/types'
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
import type { Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'

View File

@@ -1,2 +1,7 @@
export { BlockSchemaResolver, blockSchemaResolver } from './schema-resolver'
export type { ResolvedBlock, ResolvedSubBlock, ResolvedOption, ResolvedOutput } from './schema-types'
export type {
ResolvedBlock,
ResolvedOption,
ResolvedOutput,
ResolvedSubBlock,
} from './schema-types'

View File

@@ -1,7 +1,12 @@
import { createLogger } from '@sim/logger'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import type { ResolvedBlock, ResolvedOption, ResolvedOutput, ResolvedSubBlock } from './schema-types'
import type {
ResolvedBlock,
ResolvedOption,
ResolvedOutput,
ResolvedSubBlock,
} from './schema-types'
const logger = createLogger('BlockSchemaResolver')
@@ -86,7 +91,7 @@ export class BlockSchemaResolver {
}
if (!resolved.validation?.min && !resolved.validation?.max && !resolved.validation?.pattern) {
delete resolved.validation
resolved.validation = undefined
}
return resolved
@@ -187,7 +192,9 @@ export class BlockSchemaResolver {
private supportsTriggerMode(config: BlockConfig): boolean {
return Boolean(
config.triggerAllowed ||
config.subBlocks.some((subBlock) => subBlock.id === 'triggerMode' || subBlock.mode === 'trigger')
config.subBlocks.some(
(subBlock) => subBlock.id === 'triggerMode' || subBlock.mode === 'trigger'
)
)
}

View File

@@ -42,15 +42,14 @@ import {
} from '@/lib/copilot/messages'
import type { CopilotTransportMode } from '@/lib/copilot/models'
import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import {
abortAllInProgressTools,
cleanupActiveState,
isRejectedState,
isTerminalState,
resolveToolDisplay,
stripTodoTags,
} from '@/lib/copilot/store-utils'
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
import { getQueryClient } from '@/app/_shell/providers/query-provider'
import { subscriptionKeys } from '@/hooks/queries/subscription'
import type {
@@ -277,13 +276,7 @@ function prepareSendContext(
isSendingMessage,
abortController: activeAbortController,
} = get()
const {
stream = true,
fileAttachments,
contexts,
messageId,
queueIfBusy = true,
} = options
const { stream = true, fileAttachments, contexts, messageId, queueIfBusy = true } = options
if (!workflowId) return null
@@ -381,9 +374,13 @@ function prepareSendContext(
? `${message.substring(0, OPTIMISTIC_TITLE_MAX_LENGTH - 3)}...`
: message
set((state) => ({
currentChat: state.currentChat ? { ...state.currentChat, title: optimisticTitle } : state.currentChat,
currentChat: state.currentChat
? { ...state.currentChat, title: optimisticTitle }
: state.currentChat,
chats: state.currentChat
? state.chats.map((c) => (c.id === state.currentChat!.id ? { ...c, title: optimisticTitle } : c))
? state.chats.map((c) =>
c.id === state.currentChat!.id ? { ...c, title: optimisticTitle } : c
)
: state.chats,
}))
}
@@ -416,7 +413,9 @@ async function initiateStream(
kind: c?.kind,
chatId: c?.kind === 'past_chat' ? c.chatId : undefined,
workflowId:
c?.kind === 'workflow' || c?.kind === 'current_workflow' || c?.kind === 'workflow_block'
c?.kind === 'workflow' ||
c?.kind === 'current_workflow' ||
c?.kind === 'workflow_block'
? c.workflowId
: undefined,
label: c?.label,
@@ -435,7 +434,8 @@ async function initiateStream(
})
}
const apiMode: CopilotTransportMode = mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
const apiMode: CopilotTransportMode =
mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
const uiToApiCommandMap: Record<string, string> = { actions: 'superagent' }
const commands = contexts
?.filter((c) => c.kind === 'slash_command' && 'command' in c)
@@ -532,7 +532,9 @@ async function finalizeStream(
const errorMessage = createErrorMessage(prepared.streamingMessage.id, errorContent, errorType)
set((state) => ({
messages: state.messages.map((m) => (m.id === prepared.streamingMessage.id ? errorMessage : m)),
messages: state.messages.map((m) =>
m.id === prepared.streamingMessage.id ? errorMessage : m
),
error: errorContent,
isSendingMessage: false,
abortController: null,
@@ -726,10 +728,7 @@ function finalizeResume(
const hasContinueTag =
(typeof m.content === 'string' && m.content.includes(CONTINUE_OPTIONS_TAG)) ||
(Array.isArray(m.contentBlocks) &&
m.contentBlocks.some(
(b) =>
b.type === 'text' && b.content?.includes(CONTINUE_OPTIONS_TAG)
))
m.contentBlocks.some((b) => b.type === 'text' && b.content?.includes(CONTINUE_OPTIONS_TAG)))
if (!hasContinueTag) return m
cleanedExisting = true
return {
@@ -765,13 +764,16 @@ function finalizeResume(
} else if (replay.bufferedContent || (replay.replayBlocks && replay.replayBlocks.length > 0)) {
nextMessages = nextMessages.map((m) => {
if (m.id !== replay.nextStream.assistantMessageId) return m
let nextBlocks = replay.replayBlocks && replay.replayBlocks.length > 0 ? replay.replayBlocks : null
let nextBlocks =
replay.replayBlocks && replay.replayBlocks.length > 0 ? replay.replayBlocks : null
if (!nextBlocks) {
const existingBlocks = Array.isArray(m.contentBlocks) ? m.contentBlocks : []
const existingText = extractTextFromBlocks(existingBlocks)
if (existingText && replay.bufferedContent.startsWith(existingText)) {
const delta = replay.bufferedContent.slice(existingText.length)
nextBlocks = delta ? appendTextToBlocks(existingBlocks, delta) : cloneContentBlocks(existingBlocks)
nextBlocks = delta
? appendTextToBlocks(existingBlocks, delta)
: cloneContentBlocks(existingBlocks)
} else if (!existingText && existingBlocks.length === 0) {
nextBlocks = replay.bufferedContent
? [{ type: TEXT_BLOCK_TYPE, content: replay.bufferedContent, timestamp: Date.now() }]
@@ -852,7 +854,10 @@ async function resumeFromLiveStream(
set({ isSendingMessage: false, abortController: null })
} catch (error) {
if (error instanceof Error && (error.name === 'AbortError' || error.message.includes('aborted'))) {
if (
error instanceof Error &&
(error.name === 'AbortError' || error.message.includes('aborted'))
) {
logger.info('[Copilot] Resume stream aborted by user')
set({ isSendingMessage: false, abortController: null })
return false
@@ -1764,7 +1769,8 @@ export const useCopilotStore = create<CopilotStore>()(
if (abortSignal?.aborted) {
context.wasAborted = true
const { suppressAbortContinueOption } = get()
context.suppressContinueOption = suppressAbortContinueOption === true || isPageUnloading()
context.suppressContinueOption =
suppressAbortContinueOption === true || isPageUnloading()
if (suppressAbortContinueOption) {
set({ suppressAbortContinueOption: false })
}

View File

@@ -19,7 +19,11 @@ export interface DiffActionOptions {
}
export interface WorkflowDiffActions {
setProposedChanges: (workflowState: WorkflowState, diffAnalysis?: DiffAnalysis, options?: DiffActionOptions) => Promise<void>
setProposedChanges: (
workflowState: WorkflowState,
diffAnalysis?: DiffAnalysis,
options?: DiffActionOptions
) => Promise<void>
clearDiff: (options?: { restoreBaseline?: boolean }) => void
toggleDiffView: () => void
acceptChanges: (options?: DiffActionOptions) => Promise<void>