Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-06 04:35:03 -05:00)
Refactor complete - no testing yet
@@ -1,35 +0,0 @@
|
||||
# lib/copilot/tools/server/workflow/edit-workflow.ts
|
||||
90-98 ( 9 lines) [function] logSkippedItem
|
||||
103-113 ( 11 lines) [function] findBlockWithDuplicateNormalizedName
|
||||
127-196 ( 70 lines) [function] validateInputsForBlock
|
||||
211-463 ( 253 lines) [function] validateValueForSubBlockType
|
||||
481-566 ( 86 lines) [function] topologicalSortInserts
|
||||
571-684 ( 114 lines) [function] createBlockFromParams
|
||||
686-716 ( 31 lines) [function] updateCanonicalModesForInputs
|
||||
721-762 ( 42 lines) [function] normalizeTools
|
||||
786-804 ( 19 lines) [function] normalizeArrayWithIds
|
||||
809-811 ( 3 lines) [function] shouldNormalizeArrayIds
|
||||
818-859 ( 42 lines) [function] normalizeResponseFormat
|
||||
834-847 ( 14 lines) [arrow] sortKeys
|
||||
871-945 ( 75 lines) [function] validateSourceHandleForBlock
|
||||
956-1051 ( 96 lines) [function] validateConditionHandle
|
||||
1062-1136 ( 75 lines) [function] validateRouterHandle
|
||||
1141-1149 ( 9 lines) [function] validateTargetHandle
|
||||
1155-1261 ( 107 lines) [function] createValidatedEdge
|
||||
1270-1307 ( 38 lines) [function] addConnectionsAsEdges
|
||||
1280-1291 ( 12 lines) [arrow] addEdgeForTarget
|
||||
1309-1339 ( 31 lines) [function] applyTriggerConfigToBlockSubblocks
|
||||
1353-1361 ( 9 lines) [function] isBlockTypeAllowed
|
||||
1367-1404 ( 38 lines) [function] filterDisallowedTools
|
||||
1413-1499 ( 87 lines) [function] normalizeBlockIdsInOperations
|
||||
1441-1444 ( 4 lines) [arrow] replaceId
|
||||
1504-2676 (1173 lines) [function] applyOperationsToWorkflowState
|
||||
1649-1656 ( 8 lines) [arrow] findChildren
|
||||
2055-2059 ( 5 lines) [arrow] mapConnectionTypeToHandle
|
||||
2063-2074 ( 12 lines) [arrow] addEdgeForTarget
|
||||
2682-2777 ( 96 lines) [function] validateWorkflowSelectorIds
|
||||
2786-3066 ( 281 lines) [function] preValidateCredentialInputs
|
||||
2820-2845 ( 26 lines) [function] collectCredentialInputs
|
||||
2850-2870 ( 21 lines) [function] collectHostedApiKeyInput
|
||||
3068-3117 ( 50 lines) [function] getCurrentWorkflowStateFromDb
|
||||
3121-3333 ( 213 lines) [method] <anonymous class>.execute
|
||||
@@ -1,21 +0,0 @@
# lib/copilot/tools/server/blocks/get-blocks-metadata-tool.ts
108-306 ( 199 lines) [method] <anonymous class>.execute
309-384 ( 76 lines) [function] transformBlockMetadata
386-459 ( 74 lines) [function] extractInputs
461-503 ( 43 lines) [function] extractOperationInputs
505-518 ( 14 lines) [function] extractOutputs
520-538 ( 19 lines) [function] formatOutputsFromDefinition
540-563 ( 24 lines) [function] mapSchemaTypeToSimpleType
565-591 ( 27 lines) [function] generateInputExample
593-669 ( 77 lines) [function] processSubBlock
671-679 ( 9 lines) [function] resolveAuthType
686-702 ( 17 lines) [function] getStaticModelOptions
712-754 ( 43 lines) [function] callOptionsWithFallback
756-806 ( 51 lines) [function] resolveSubblockOptions
808-820 ( 13 lines) [function] removeNullish
822-832 ( 11 lines) [function] normalizeCondition
834-872 ( 39 lines) [function] splitParametersByOperation
874-905 ( 32 lines) [function] computeBlockLevelInputs
907-935 ( 29 lines) [function] computeOperationLevelInputs
937-947 ( 11 lines) [function] resolveOperationIds
949-961 ( 13 lines) [function] resolveToolIdForOperation
@@ -1,13 +0,0 @@
# lib/copilot/process-contents.ts
31-81 ( 51 lines) [function] processContexts
84-161 ( 78 lines) [function] processContextsServer
163-208 ( 46 lines) [function] sanitizeMessageForDocs
210-248 ( 39 lines) [function] processPastChatFromDb
250-281 ( 32 lines) [function] processWorkflowFromDb
283-316 ( 34 lines) [function] processPastChat
319-321 ( 3 lines) [function] processPastChatViaApi
323-362 ( 40 lines) [function] processKnowledgeFromDb
364-439 ( 76 lines) [function] processBlockMetadata
441-473 ( 33 lines) [function] processTemplateFromDb
475-498 ( 24 lines) [function] processWorkflowBlockFromDb
500-555 ( 56 lines) [function] processExecutionLogFromDb
@@ -11,6 +11,7 @@ import {
 } from '@/lib/copilot/store-utils'
 import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
 import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
+import type { WorkflowState } from '@/stores/workflows/workflow/types'
 import {
   appendTextBlock,
   beginThinkingBlock,
@@ -295,7 +296,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
         })
         if (hasWorkflowState) {
           const diffStore = useWorkflowDiffStore.getState()
-          diffStore.setProposedChanges(resultPayload.workflowState).catch((err) => {
+          diffStore.setProposedChanges(resultPayload.workflowState as WorkflowState).catch((err) => {
             logger.error('[SSE] Failed to apply edit_workflow diff', {
               error: err instanceof Error ? err.message : String(err),
             })
@@ -14,15 +14,16 @@ export interface OrchestrateStreamOptions extends OrchestratorOptions {
 }

 export async function orchestrateCopilotStream(
-  requestPayload: Record<string, any>,
+  requestPayload: Record<string, unknown>,
   options: OrchestrateStreamOptions
 ): Promise<OrchestratorResult> {
   const { userId, workflowId, chatId } = options
   const execContext = await prepareExecutionContext(userId, workflowId)

+  const payloadMsgId = requestPayload?.messageId
   const context = createStreamingContext({
     chatId,
-    messageId: requestPayload?.messageId || crypto.randomUUID(),
+    messageId: typeof payloadMsgId === 'string' ? payloadMsgId : crypto.randomUUID(),
   })

   try {
@@ -344,7 +344,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
     const parentToolCallId = context.subAgentParentToolCallId
     if (!parentToolCallId) return
     const data = getEventData(event)
-    const toolCallId = event.toolCallId || data?.id
+    const toolCallId = event.toolCallId || (data?.id as string | undefined)
     if (!toolCallId) return

     // Update in subAgentToolCalls.
@@ -364,14 +364,20 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
       subAgentToolCall.status = status
       subAgentToolCall.endTime = endTime
       if (result) subAgentToolCall.result = result
-      if (hasError) subAgentToolCall.error = data?.error || data?.result?.error
+      if (hasError) {
+        const resultObj = asRecord(data?.result)
+        subAgentToolCall.error = (data?.error || resultObj.error) as string | undefined
+      }
     }

     if (mainToolCall) {
       mainToolCall.status = status
       mainToolCall.endTime = endTime
       if (result) mainToolCall.result = result
-      if (hasError) mainToolCall.error = data?.error || data?.result?.error
+      if (hasError) {
+        const resultObj = asRecord(data?.result)
+        mainToolCall.error = (data?.error || resultObj.error) as string | undefined
+      }
     }
   },
 }
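Both error branches now read `.error` through an `asRecord` helper instead of chained `any` access. The helper itself is outside this hunk; a minimal version consistent with the call sites would be:

// Sketch of the asRecord helper assumed by the handlers above; the real
// implementation lives elsewhere in the store utils and may differ.
function asRecord(value: unknown): Record<string, unknown> {
  return typeof value === 'object' && value !== null ? (value as Record<string, unknown>) : {}
}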
@@ -44,29 +44,29 @@ export async function processContexts(
         ctx.kind
       )
     }
-    if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
+    if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
       return await processKnowledgeFromDb(
-        (ctx as any).knowledgeId,
+        ctx.knowledgeId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'blocks' && (ctx as any).blockId) {
-      return await processBlockMetadata((ctx as any).blockId, ctx.label ? `@${ctx.label}` : '@')
+    if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
+      return await processBlockMetadata(ctx.blockIds[0], ctx.label ? `@${ctx.label}` : '@')
     }
-    if (ctx.kind === 'templates' && (ctx as any).templateId) {
+    if (ctx.kind === 'templates' && ctx.templateId) {
       return await processTemplateFromDb(
-        (ctx as any).templateId,
+        ctx.templateId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'logs' && (ctx as any).executionId) {
+    if (ctx.kind === 'logs' && ctx.executionId) {
       return await processExecutionLogFromDb(
-        (ctx as any).executionId,
+        ctx.executionId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
-      return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
+    if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
+      return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
     }
     // Other kinds can be added here: workflow, blocks, logs, knowledge, templates, docs
     return null
@@ -99,33 +99,33 @@ export async function processContextsServer(
         ctx.kind
       )
     }
-    if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
+    if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
       return await processKnowledgeFromDb(
-        (ctx as any).knowledgeId,
+        ctx.knowledgeId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'blocks' && (ctx as any).blockId) {
+    if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
       return await processBlockMetadata(
-        (ctx as any).blockId,
+        ctx.blockIds[0],
         ctx.label ? `@${ctx.label}` : '@',
         userId
       )
     }
-    if (ctx.kind === 'templates' && (ctx as any).templateId) {
+    if (ctx.kind === 'templates' && ctx.templateId) {
       return await processTemplateFromDb(
-        (ctx as any).templateId,
+        ctx.templateId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'logs' && (ctx as any).executionId) {
+    if (ctx.kind === 'logs' && ctx.executionId) {
       return await processExecutionLogFromDb(
-        (ctx as any).executionId,
+        ctx.executionId,
         ctx.label ? `@${ctx.label}` : '@'
       )
     }
-    if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
-      return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
+    if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
+      return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
     }
     if (ctx.kind === 'docs') {
       try {
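Both processors now read IDs straight off the context instead of casting with `as any`, which means the context type itself must carry them. A plausible shape, inferred from the fields accessed above (this union is an assumption, not the repo's actual definition):

// Hypothetical discriminated union supporting the accesses above without
// casts; field names come from this diff, the overall shape is assumed.
type CopilotContext =
  | { kind: 'knowledge'; knowledgeId?: string; label?: string }
  | { kind: 'blocks'; blockIds?: string[]; label?: string }
  | { kind: 'templates'; templateId?: string; label?: string }
  | { kind: 'logs'; executionId?: string; label?: string }
  | { kind: 'workflow_block'; workflowId?: string; blockId?: string; label?: string }
  | { kind: 'docs'; label?: string }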
@@ -1,4 +1,20 @@
-export interface BaseServerTool<TArgs = any, TResult = any> {
-  name: string
-  execute(args: TArgs, context?: { userId: string }): Promise<TResult>
+import type { z } from 'zod'
+
+export interface ServerToolContext {
+  userId: string
+}
+
+/**
+ * Base interface for server-side copilot tools.
+ *
+ * Tools can optionally declare Zod schemas for input/output validation.
+ * If provided, the router validates automatically.
+ */
+export interface BaseServerTool<TArgs = unknown, TResult = unknown> {
+  name: string
+  execute(args: TArgs, context?: ServerToolContext): Promise<TResult>
+  /** Optional Zod schema for input validation */
+  inputSchema?: z.ZodType<TArgs>
+  /** Optional Zod schema for output validation */
+  outputSchema?: z.ZodType<TResult>
 }
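A minimal sketch of a tool written against the new interface (the `echo` tool and its schemas are illustrative, not part of the registry in this commit):

import { z } from 'zod'

// Illustrative schemas: any tool that declares them gets parsed
// automatically by routeExecution, so execute() can trust its args.
const EchoInput = z.object({ message: z.string() })
const EchoOutput = z.object({ echoed: z.string() })

const echoServerTool: BaseServerTool<
  z.infer<typeof EchoInput>,
  z.infer<typeof EchoOutput>
> = {
  name: 'echo', // hypothetical tool name
  inputSchema: EchoInput,
  outputSchema: EchoOutput,
  async execute({ message }, context) {
    return { echoed: `${context?.userId ?? 'anonymous'}: ${message}` }
  },
}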
@@ -1,6 +1,7 @@
 import { createLogger } from '@sim/logger'
 import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
 import {
   GetBlockConfigInput,
   type GetBlockConfigInputType,
   GetBlockConfigResult,
   type GetBlockConfigResultType,
@@ -370,6 +371,8 @@ export const getBlockConfigServerTool: BaseServerTool<
   GetBlockConfigResultType
 > = {
   name: 'get_block_config',
+  inputSchema: GetBlockConfigInput,
+  outputSchema: GetBlockConfigResult,
   async execute(
     { blockType, operation, trigger }: GetBlockConfigInputType,
     context?: { userId: string }
@@ -14,6 +14,8 @@ export const getBlockOptionsServerTool: BaseServerTool<
   GetBlockOptionsResultType
 > = {
   name: 'get_block_options',
+  inputSchema: GetBlockOptionsInput,
+  outputSchema: GetBlockOptionsResult,
   async execute(
     { blockId }: GetBlockOptionsInputType,
     context?: { userId: string }
@@ -13,6 +13,8 @@ export const getBlocksAndToolsServerTool: BaseServerTool<
   ReturnType<typeof GetBlocksAndToolsResult.parse>
 > = {
   name: 'get_blocks_and_tools',
+  inputSchema: GetBlocksAndToolsInput,
+  outputSchema: GetBlocksAndToolsResult,
   async execute(_args: unknown, context?: { userId: string }) {
     const logger = createLogger('GetBlocksAndToolsServerTool')
     logger.debug('Executing get_blocks_and_tools')
@@ -105,6 +105,8 @@ export const getBlocksMetadataServerTool: BaseServerTool<
   ReturnType<typeof GetBlocksMetadataResult.parse>
 > = {
   name: 'get_blocks_metadata',
+  inputSchema: GetBlocksMetadataInput,
+  outputSchema: GetBlocksMetadataResult,
   async execute(
     { blockIds }: ReturnType<typeof GetBlocksMetadataInput.parse>,
     context?: { userId: string }
@@ -15,6 +15,8 @@ export const getTriggerBlocksServerTool: BaseServerTool<
   ReturnType<typeof GetTriggerBlocksResult.parse>
 > = {
   name: 'get_trigger_blocks',
+  inputSchema: GetTriggerBlocksInput,
+  outputSchema: GetTriggerBlocksResult,
   async execute(_args: unknown, context?: { userId: string }) {
     const logger = createLogger('GetTriggerBlocksServerTool')
     logger.debug('Executing get_trigger_blocks')
@@ -3,22 +3,34 @@ import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
 import { executeTool } from '@/tools'
 import type { TableRow } from '@/tools/types'

+const RESULT_CHAR_CAP = Number(process.env.COPILOT_TOOL_RESULT_CHAR_CAP || 20000)
+
 interface MakeApiRequestParams {
   url: string
   method: 'GET' | 'POST' | 'PUT'
   queryParams?: Record<string, string | number | boolean>
   headers?: Record<string, string>
-  body?: any
+  body?: unknown
 }

-export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, any> = {
+interface ApiResponse {
+  data: string
+  status: number
+  headers: Record<string, string>
+  truncated?: boolean
+  totalChars?: number
+  previewChars?: number
+  note?: string
+}
+
+export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, ApiResponse> = {
   name: 'make_api_request',
-  async execute(params: MakeApiRequestParams): Promise<any> {
+  async execute(params: MakeApiRequestParams): Promise<ApiResponse> {
     const logger = createLogger('MakeApiRequestServerTool')
-    const { url, method, queryParams, headers, body } = params || ({} as MakeApiRequestParams)
+    const { url, method, queryParams, headers, body } = params
     if (!url || !method) throw new Error('url and method are required')

-    const toTableRows = (obj?: Record<string, any>): TableRow[] | null => {
+    const toTableRows = (obj?: Record<string, unknown>): TableRow[] | null => {
       if (!obj || typeof obj !== 'object') return null
       return Object.entries(obj).map(([key, value]) => ({
         id: key,
@@ -26,21 +38,22 @@ export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, any>
       }))
     }
     const headersTable = toTableRows(headers)
-    const queryParamsTable = toTableRows(queryParams as Record<string, any> | undefined)
+    const queryParamsTable = toTableRows(queryParams as Record<string, unknown> | undefined)

     const result = await executeTool(
       'http_request',
       { url, method, params: queryParamsTable, headers: headersTable, body },
       true
     )
-    if (!result.success) throw new Error(result.error || 'API request failed')
-    const output = (result as any).output || result
-    const data = output.output?.data ?? output.data
-    const status = output.output?.status ?? output.status ?? 200
-    const respHeaders = output.output?.headers ?? output.headers ?? {}
+    if (!result.success) throw new Error(result.error ?? 'API request failed')

-    const CAP = Number(process.env.COPILOT_TOOL_RESULT_CHAR_CAP || 20000)
-    const toStringSafe = (val: any): string => {
+    const output = result.output as Record<string, unknown> | undefined
+    const nestedOutput = output?.output as Record<string, unknown> | undefined
+    const data = nestedOutput?.data ?? output?.data
+    const status = (nestedOutput?.status ?? output?.status ?? 200) as number
+    const respHeaders = (nestedOutput?.headers ?? output?.headers ?? {}) as Record<string, string>
+
+    const toStringSafe = (val: unknown): string => {
       if (typeof val === 'string') return val
       try {
         return JSON.stringify(val)
@@ -53,7 +66,6 @@ export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, any>
       try {
         let text = html
         let previous: string
-
         do {
           previous = text
           text = text.replace(/<script[\s\S]*?<\/script\s*>/gi, '')
@@ -61,26 +73,21 @@ export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, any>
          text = text.replace(/<[^>]*>/g, ' ')
          text = text.replace(/[<>]/g, ' ')
        } while (text !== previous)

        return text.replace(/\s+/g, ' ').trim()
      } catch {
        return html
      }
    }

    let normalized = toStringSafe(data)
    const looksLikeHtml =
      /<html[\s\S]*<\/html>/i.test(normalized) || /<body[\s\S]*<\/body>/i.test(normalized)
    if (looksLikeHtml) normalized = stripHtml(normalized)

    const totalChars = normalized.length
-    if (totalChars > CAP) {
-      const preview = normalized.slice(0, CAP)
-      logger.warn('API response truncated by character cap', {
-        url,
-        method,
-        totalChars,
-        previewChars: preview.length,
-        cap: CAP,
-      })
+    if (totalChars > RESULT_CHAR_CAP) {
+      const preview = normalized.slice(0, RESULT_CHAR_CAP)
+      logger.warn('API response truncated', { url, method, totalChars, cap: RESULT_CHAR_CAP })
       return {
         data: preview,
         status,
@@ -88,10 +95,11 @@ export const makeApiRequestServerTool: BaseServerTool<MakeApiRequestParams, any>
         truncated: true,
         totalChars,
         previewChars: preview.length,
-        note: `Response truncated to ${CAP} characters to avoid large payloads`,
+        note: `Response truncated to ${RESULT_CHAR_CAP} characters`,
       }
     }
-    logger.info('API request executed', { url, method, status, totalChars })
+
+    logger.debug('API request executed', { url, method, status, totalChars })
     return { data: normalized, status, headers: respHeaders }
   },
 }
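The tool now always returns a typed ApiResponse, so callers can branch on `truncated` to tell whether they got the full body. A sketch of that usage (direct invocation shown for illustration; requests normally arrive via the router):

// Hypothetical direct call against the tool above.
const resp = await makeApiRequestServerTool.execute({
  url: 'https://example.com/api/items', // illustrative URL
  method: 'GET',
})
if (resp.truncated) {
  // Only the first RESULT_CHAR_CAP characters survived; totalChars
  // reports the original length before the cap was applied.
  console.warn(`got ${resp.previewChars} of ${resp.totalChars} chars`)
}
console.log(resp.status, resp.data.length)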
@@ -11,78 +11,73 @@ interface OnlineSearchParams {
   hl?: string
 }

-export const searchOnlineServerTool: BaseServerTool<OnlineSearchParams, any> = {
+interface SearchResult {
+  title: string
+  link: string
+  snippet: string
+  date?: string
+  position?: number
+}
+
+interface SearchResponse {
+  results: SearchResult[]
+  query: string
+  type: string
+  totalResults: number
+  source: 'exa' | 'serper'
+}
+
+export const searchOnlineServerTool: BaseServerTool<OnlineSearchParams, SearchResponse> = {
   name: 'search_online',
-  async execute(params: OnlineSearchParams): Promise<any> {
+  async execute(params: OnlineSearchParams): Promise<SearchResponse> {
     const logger = createLogger('SearchOnlineServerTool')
     const { query, num = 10, type = 'search', gl, hl } = params
     if (!query || typeof query !== 'string') throw new Error('query is required')

     // Check which API keys are available
     const hasExaApiKey = Boolean(env.EXA_API_KEY && String(env.EXA_API_KEY).length > 0)
     const hasSerperApiKey = Boolean(env.SERPER_API_KEY && String(env.SERPER_API_KEY).length > 0)

-    logger.info('Performing online search', {
-      queryLength: query.length,
-      num,
-      type,
-      gl,
-      hl,
-      hasExaApiKey,
-      hasSerperApiKey,
-    })
+    logger.debug('Performing online search', { queryLength: query.length, num, type })

     // Try Exa first if available
     if (hasExaApiKey) {
       try {
-        logger.debug('Attempting exa_search', { num })
         const exaResult = await executeTool('exa_search', {
           query,
           numResults: num,
           type: 'auto',
-          apiKey: env.EXA_API_KEY || '',
+          apiKey: env.EXA_API_KEY ?? '',
         })

-        const exaResults = (exaResult as any)?.output?.results || []
-        const count = Array.isArray(exaResults) ? exaResults.length : 0
-        const firstTitle = count > 0 ? String(exaResults[0]?.title || '') : undefined
+        const output = exaResult.output as { results?: Array<{ title?: string; url?: string; text?: string; summary?: string; publishedDate?: string }> } | undefined
+        const exaResults = output?.results ?? []

-        logger.info('exa_search completed', {
-          success: exaResult.success,
-          resultsCount: count,
-          firstTitlePreview: firstTitle?.slice(0, 120),
-        })
-
-        if (exaResult.success && count > 0) {
-          // Transform Exa results to match expected format
-          const transformedResults = exaResults.map((result: any) => ({
-            title: result.title || '',
-            link: result.url || '',
-            snippet: result.text || result.summary || '',
+        if (exaResult.success && exaResults.length > 0) {
+          const transformedResults: SearchResult[] = exaResults.map((result, index) => ({
+            title: result.title ?? '',
+            link: result.url ?? '',
+            snippet: result.text ?? result.summary ?? '',
             date: result.publishedDate,
-            position: exaResults.indexOf(result) + 1,
+            position: index + 1,
           }))

           return {
             results: transformedResults,
             query,
             type,
-            totalResults: count,
+            totalResults: transformedResults.length,
             source: 'exa',
           }
         }

-        logger.warn('exa_search returned no results, falling back to Serper', {
-          queryLength: query.length,
-        })
-      } catch (exaError: any) {
+        logger.debug('exa_search returned no results, falling back to Serper')
+      } catch (exaError) {
         logger.warn('exa_search failed, falling back to Serper', {
-          error: exaError?.message,
+          error: exaError instanceof Error ? exaError.message : String(exaError),
         })
       }
     }

     // Fall back to Serper if Exa failed or wasn't available
     if (!hasSerperApiKey) {
       throw new Error('No search API keys available (EXA_API_KEY or SERPER_API_KEY required)')
     }
@@ -93,41 +88,24 @@ export const searchOnlineServerTool: BaseServerTool<OnlineSearchParams, any> = {
       type,
       gl,
       hl,
-      apiKey: env.SERPER_API_KEY || '',
+      apiKey: env.SERPER_API_KEY ?? '',
     }

-    try {
-      logger.debug('Calling serper_search tool', { type, num, gl, hl })
-      const result = await executeTool('serper_search', toolParams)
-      const results = (result as any)?.output?.searchResults || []
-      const count = Array.isArray(results) ? results.length : 0
-      const firstTitle = count > 0 ? String(results[0]?.title || '') : undefined
+    const result = await executeTool('serper_search', toolParams)
+    const output = result.output as { searchResults?: SearchResult[] } | undefined
+    const results = output?.searchResults ?? []

-      logger.info('serper_search completed', {
-        success: result.success,
-        resultsCount: count,
-        firstTitlePreview: firstTitle?.slice(0, 120),
-      })
+    if (!result.success) {
+      const errorMsg = (result as { error?: string }).error ?? 'Search failed'
+      throw new Error(errorMsg)
+    }

-      if (!result.success) {
-        logger.error('serper_search failed', { error: (result as any)?.error })
-        throw new Error((result as any)?.error || 'Search failed')
-      }
-
-      if (count === 0) {
-        logger.warn('serper_search returned no results', { queryLength: query.length })
-      }
-
-      return {
-        results,
-        query,
-        type,
-        totalResults: count,
-        source: 'serper',
-      }
-    } catch (e: any) {
-      logger.error('search_online execution error', { message: e?.message })
-      throw e
+    return {
+      results,
+      query,
+      type,
+      totalResults: results.length,
+      source: 'serper',
+    }
   },
 }
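The provider cascade is now: Exa when EXA_API_KEY is set, otherwise Serper, otherwise a hard error, and `source` records which backend answered. An illustrative call:

// Hypothetical direct call; in practice this goes through the router.
const search = await searchOnlineServerTool.execute({ query: 'workflow engines' })
for (const hit of search.results) {
  console.log(`${hit.position}. ${hit.title} - ${hit.link}`)
}
console.log(`${search.totalResults} results via ${search.source}`)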
@@ -1,5 +1,5 @@
 import { createLogger } from '@sim/logger'
-import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
+import type { BaseServerTool, ServerToolContext } from '@/lib/copilot/tools/server/base-tool'
 import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
 import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
 import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
@@ -13,101 +13,52 @@ import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-cr
 import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
 import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
 import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
-import {
-  ExecuteResponseSuccessSchema,
-  GetBlockConfigInput,
-  GetBlockConfigResult,
-  GetBlockOptionsInput,
-  GetBlockOptionsResult,
-  GetBlocksAndToolsInput,
-  GetBlocksAndToolsResult,
-  GetBlocksMetadataInput,
-  GetBlocksMetadataResult,
-  GetTriggerBlocksInput,
-  GetTriggerBlocksResult,
-  KnowledgeBaseArgsSchema,
-} from '@/lib/copilot/tools/shared/schemas'
+import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

 // Generic execute response schemas (success path only for this route; errors handled via HTTP status)
 export { ExecuteResponseSuccessSchema }
 export type ExecuteResponseSuccess = (typeof ExecuteResponseSuccessSchema)['_type']

-// Define server tool registry for the new copilot runtime
-const serverToolRegistry: Record<string, BaseServerTool<any, any>> = {}
 const logger = createLogger('ServerToolRouter')

-// Register tools
-serverToolRegistry[getBlocksAndToolsServerTool.name] = getBlocksAndToolsServerTool
-serverToolRegistry[getBlocksMetadataServerTool.name] = getBlocksMetadataServerTool
-serverToolRegistry[getBlockOptionsServerTool.name] = getBlockOptionsServerTool
-serverToolRegistry[getBlockConfigServerTool.name] = getBlockConfigServerTool
-serverToolRegistry[getTriggerBlocksServerTool.name] = getTriggerBlocksServerTool
-serverToolRegistry[editWorkflowServerTool.name] = editWorkflowServerTool
-serverToolRegistry[getWorkflowConsoleServerTool.name] = getWorkflowConsoleServerTool
-serverToolRegistry[searchDocumentationServerTool.name] = searchDocumentationServerTool
-serverToolRegistry[searchOnlineServerTool.name] = searchOnlineServerTool
-serverToolRegistry[setEnvironmentVariablesServerTool.name] = setEnvironmentVariablesServerTool
-serverToolRegistry[getCredentialsServerTool.name] = getCredentialsServerTool
-serverToolRegistry[makeApiRequestServerTool.name] = makeApiRequestServerTool
-serverToolRegistry[knowledgeBaseServerTool.name] = knowledgeBaseServerTool
+/** Registry of all server tools. Tools self-declare their validation schemas. */
+const serverToolRegistry: Record<string, BaseServerTool> = {
+  [getBlocksAndToolsServerTool.name]: getBlocksAndToolsServerTool,
+  [getBlocksMetadataServerTool.name]: getBlocksMetadataServerTool,
+  [getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
+  [getBlockConfigServerTool.name]: getBlockConfigServerTool,
+  [getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
+  [editWorkflowServerTool.name]: editWorkflowServerTool,
+  [getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
+  [searchDocumentationServerTool.name]: searchDocumentationServerTool,
+  [searchOnlineServerTool.name]: searchOnlineServerTool,
+  [setEnvironmentVariablesServerTool.name]: setEnvironmentVariablesServerTool,
+  [getCredentialsServerTool.name]: getCredentialsServerTool,
+  [makeApiRequestServerTool.name]: makeApiRequestServerTool,
+  [knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
+}

+/**
+ * Route a tool execution request to the appropriate server tool.
+ * Validates input/output using the tool's declared Zod schemas if present.
+ */
 export async function routeExecution(
   toolName: string,
   payload: unknown,
-  context?: { userId: string }
-): Promise<any> {
+  context?: ServerToolContext
+): Promise<unknown> {
   const tool = serverToolRegistry[toolName]
   if (!tool) {
     throw new Error(`Unknown server tool: ${toolName}`)
   }
-  logger.debug('Routing to tool', {
-    toolName,
-    payloadPreview: (() => {
-      try {
-        return JSON.stringify(payload).slice(0, 200)
-      } catch {
-        return undefined
-      }
-    })(),
-  })

-  let args: any = payload || {}
-  if (toolName === 'get_blocks_and_tools') {
-    args = GetBlocksAndToolsInput.parse(args)
-  }
-  if (toolName === 'get_blocks_metadata') {
-    args = GetBlocksMetadataInput.parse(args)
-  }
-  if (toolName === 'get_block_options') {
-    args = GetBlockOptionsInput.parse(args)
-  }
-  if (toolName === 'get_block_config') {
-    args = GetBlockConfigInput.parse(args)
-  }
-  if (toolName === 'get_trigger_blocks') {
-    args = GetTriggerBlocksInput.parse(args)
-  }
-  if (toolName === 'knowledge_base') {
-    args = KnowledgeBaseArgsSchema.parse(args)
-  }
+  logger.debug('Routing to tool', { toolName })
+
+  // Validate input if tool declares a schema
+  const args = tool.inputSchema ? tool.inputSchema.parse(payload ?? {}) : (payload ?? {})

   // Execute
   const result = await tool.execute(args, context)

-  if (toolName === 'get_blocks_and_tools') {
-    return GetBlocksAndToolsResult.parse(result)
-  }
-  if (toolName === 'get_blocks_metadata') {
-    return GetBlocksMetadataResult.parse(result)
-  }
-  if (toolName === 'get_block_options') {
-    return GetBlockOptionsResult.parse(result)
-  }
-  if (toolName === 'get_block_config') {
-    return GetBlockConfigResult.parse(result)
-  }
-  if (toolName === 'get_trigger_blocks') {
-    return GetTriggerBlocksResult.parse(result)
-  }
-
-  return result
+  // Validate output if tool declares a schema
+  return tool.outputSchema ? tool.outputSchema.parse(result) : result
 }
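With the registry and per-tool schemas in place, a caller reduces to a single routeExecution call. A sketch of how an API route might sit on top of it (the route shape below is assumed; the actual route file is not in this diff):

// Hypothetical Next.js-style route handler. Zod errors thrown by
// inputSchema.parse / outputSchema.parse surface here as exceptions.
export async function POST(req: Request): Promise<Response> {
  const { toolName, payload, userId } = await req.json()
  try {
    const result = await routeExecution(toolName, payload, { userId })
    return Response.json({ success: true, result })
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err)
    return Response.json({ success: false, error: message }, { status: 400 })
  }
}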
File diff suppressed because it is too large
@@ -0,0 +1,633 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { buildCanonicalIndex, isCanonicalPair } from '@/lib/workflows/subblocks/visibility'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import type { EditWorkflowOperation, SkippedItem, ValidationError } from './types'
import { UUID_REGEX, logSkippedItem } from './types'
import {
  validateInputsForBlock,
  validateSourceHandleForBlock,
  validateTargetHandle,
} from './validation'

/**
 * Helper to create a block state from operation params
 */
export function createBlockFromParams(
  blockId: string,
  params: any,
  parentId?: string,
  errorsCollector?: ValidationError[],
  permissionConfig?: PermissionGroupConfig | null,
  skippedItems?: SkippedItem[]
): any {
  const blockConfig = getAllBlocks().find((b) => b.type === params.type)

  // Validate inputs against block configuration
  let validatedInputs: Record<string, any> | undefined
  if (params.inputs) {
    const result = validateInputsForBlock(params.type, params.inputs, blockId)
    validatedInputs = result.validInputs
    if (errorsCollector && result.errors.length > 0) {
      errorsCollector.push(...result.errors)
    }
  }

  // Determine outputs based on trigger mode
  const triggerMode = params.triggerMode || false
  let outputs: Record<string, any>

  if (params.outputs) {
    outputs = params.outputs
  } else if (blockConfig) {
    const subBlocks: Record<string, any> = {}
    if (validatedInputs) {
      Object.entries(validatedInputs).forEach(([key, value]) => {
        // Skip runtime subblock IDs when computing outputs
        if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) {
          return
        }
        subBlocks[key] = { id: key, type: 'short-input', value: value }
      })
    }
    outputs = getBlockOutputs(params.type, subBlocks, triggerMode)
  } else {
    outputs = {}
  }

  const blockState: any = {
    id: blockId,
    type: params.type,
    name: params.name,
    position: { x: 0, y: 0 },
    enabled: params.enabled !== undefined ? params.enabled : true,
    horizontalHandles: true,
    advancedMode: params.advancedMode || false,
    height: 0,
    triggerMode: triggerMode,
    subBlocks: {},
    outputs: outputs,
    data: parentId ? { parentId, extent: 'parent' as const } : {},
    locked: false,
  }

  // Add validated inputs as subBlocks
  if (validatedInputs) {
    Object.entries(validatedInputs).forEach(([key, value]) => {
      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) {
        return
      }

      let sanitizedValue = value

      // Normalize array subblocks with id fields (inputFormat, table rows, etc.)
      if (shouldNormalizeArrayIds(key)) {
        sanitizedValue = normalizeArrayWithIds(value)
      }

      // Special handling for tools - normalize and filter disallowed
      if (key === 'tools' && Array.isArray(value)) {
        sanitizedValue = filterDisallowedTools(
          normalizeTools(value),
          permissionConfig ?? null,
          blockId,
          skippedItems ?? []
        )
      }

      // Special handling for responseFormat - normalize to ensure consistent format
      if (key === 'responseFormat' && value) {
        sanitizedValue = normalizeResponseFormat(value)
      }

      blockState.subBlocks[key] = {
        id: key,
        type: 'short-input',
        value: sanitizedValue,
      }
    })
  }

  // Set up subBlocks from block configuration
  if (blockConfig) {
    blockConfig.subBlocks.forEach((subBlock) => {
      if (!blockState.subBlocks[subBlock.id]) {
        blockState.subBlocks[subBlock.id] = {
          id: subBlock.id,
          type: subBlock.type,
          value: null,
        }
      }
    })

    if (validatedInputs) {
      updateCanonicalModesForInputs(blockState, Object.keys(validatedInputs), blockConfig)
    }
  }

  return blockState
}

export function updateCanonicalModesForInputs(
  block: { data?: { canonicalModes?: Record<string, 'basic' | 'advanced'> } },
  inputKeys: string[],
  blockConfig: BlockConfig
): void {
  if (!blockConfig.subBlocks?.length) return

  const canonicalIndex = buildCanonicalIndex(blockConfig.subBlocks)
  const canonicalModeUpdates: Record<string, 'basic' | 'advanced'> = {}

  for (const inputKey of inputKeys) {
    const canonicalId = canonicalIndex.canonicalIdBySubBlockId[inputKey]
    if (!canonicalId) continue

    const group = canonicalIndex.groupsById[canonicalId]
    if (!group || !isCanonicalPair(group)) continue

    const isAdvanced = group.advancedIds.includes(inputKey)
    const existingMode = canonicalModeUpdates[canonicalId]

    if (!existingMode || isAdvanced) {
      canonicalModeUpdates[canonicalId] = isAdvanced ? 'advanced' : 'basic'
    }
  }

  if (Object.keys(canonicalModeUpdates).length > 0) {
    if (!block.data) block.data = {}
    if (!block.data.canonicalModes) block.data.canonicalModes = {}
    Object.assign(block.data.canonicalModes, canonicalModeUpdates)
  }
}

/**
 * Normalize tools array by adding back fields that were sanitized for training
 */
export function normalizeTools(tools: any[]): any[] {
  return tools.map((tool) => {
    if (tool.type === 'custom-tool') {
      // New reference format: minimal fields only
      if (tool.customToolId && !tool.schema && !tool.code) {
        return {
          type: tool.type,
          customToolId: tool.customToolId,
          usageControl: tool.usageControl || 'auto',
          isExpanded: tool.isExpanded ?? true,
        }
      }

      // Legacy inline format: include all fields
      const normalized: any = {
        ...tool,
        params: tool.params || {},
        isExpanded: tool.isExpanded ?? true,
      }

      // Ensure schema has proper structure (for inline format)
      if (normalized.schema?.function) {
        normalized.schema = {
          type: 'function',
          function: {
            name: normalized.schema.function.name || tool.title, // Preserve name or derive from title
            description: normalized.schema.function.description,
            parameters: normalized.schema.function.parameters,
          },
        }
      }

      return normalized
    }

    // For other tool types, just ensure isExpanded exists
    return {
      ...tool,
      isExpanded: tool.isExpanded ?? true,
    }
  })
}

/**
 * Subblock types that store arrays of objects with `id` fields.
 * The LLM may generate arbitrary IDs which need to be converted to proper UUIDs.
 */
const ARRAY_WITH_ID_SUBBLOCK_TYPES = new Set([
  'inputFormat', // input-format: Fields with id, name, type, value, collapsed
  'headers', // table: Rows with id, cells (used for HTTP headers)
  'params', // table: Rows with id, cells (used for query params)
  'variables', // table or variables-input: Rows/assignments with id
  'tagFilters', // knowledge-tag-filters: Filters with id, tagName, etc.
  'documentTags', // document-tag-entry: Tags with id, tagName, etc.
  'metrics', // eval-input: Metrics with id, name, description, range
])

/**
 * Normalizes array subblock values by ensuring each item has a valid UUID.
 * The LLM may generate arbitrary IDs like "input-desc-001" or "row-1" which need
 * to be converted to proper UUIDs for consistency with UI-created items.
 */
export function normalizeArrayWithIds(value: unknown): any[] {
  if (!Array.isArray(value)) {
    return []
  }

  return value.map((item: any) => {
    if (!item || typeof item !== 'object') {
      return item
    }

    // Check if id is missing or not a valid UUID
    const hasValidUUID = typeof item.id === 'string' && UUID_REGEX.test(item.id)
    if (!hasValidUUID) {
      return { ...item, id: crypto.randomUUID() }
    }

    return item
  })
}

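For example, a model-generated row ID fails UUID_REGEX and is replaced, while an existing UUID passes through unchanged (illustrative values):

// Illustrative call: the first item gets a fresh crypto.randomUUID(),
// the second already matches UUID_REGEX and is returned as-is.
const rows = normalizeArrayWithIds([
  { id: 'row-1', cells: { Key: 'Accept', Value: 'application/json' } },
  { id: '4b4bfa5a-9e43-4b7c-bb2c-2dd071e22f2a', cells: { Key: 'X-Trace', Value: 'on' } },
])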
/**
 * Checks if a subblock key should have its array items normalized with UUIDs.
 */
export function shouldNormalizeArrayIds(key: string): boolean {
  return ARRAY_WITH_ID_SUBBLOCK_TYPES.has(key)
}

/**
 * Normalize responseFormat to ensure consistent storage
 * Handles both string (JSON) and object formats
 * Returns pretty-printed JSON for better UI readability
 */
export function normalizeResponseFormat(value: any): string {
  try {
    let obj = value

    // If it's already a string, parse it first
    if (typeof value === 'string') {
      const trimmed = value.trim()
      if (!trimmed) {
        return ''
      }
      obj = JSON.parse(trimmed)
    }

    // If it's an object, stringify it with consistent formatting
    if (obj && typeof obj === 'object') {
      // Sort keys recursively for consistent comparison
      const sortKeys = (item: any): any => {
        if (Array.isArray(item)) {
          return item.map(sortKeys)
        }
        if (item !== null && typeof item === 'object') {
          return Object.keys(item)
            .sort()
            .reduce((result: any, key: string) => {
              result[key] = sortKeys(item[key])
              return result
            }, {})
        }
        return item
      }

      // Return pretty-printed with 2-space indentation for UI readability
      // The sanitizer will normalize it to minified format for comparison
      return JSON.stringify(sortKeys(obj), null, 2)
    }

    return String(value)
  } catch {
    // If parsing fails, return the original value as string
    return String(value)
  }
}

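Key order stops mattering after normalization; two semantically identical inputs produce the same string (illustrative schema):

// Both calls yield the identical sorted, pretty-printed form.
const a = normalizeResponseFormat('{"type":"object","properties":{"b":1,"a":2}}')
const b = normalizeResponseFormat({ properties: { a: 2, b: 1 }, type: 'object' })
// a === b:
// {
//   "properties": {
//     "a": 2,
//     "b": 1
//   },
//   "type": "object"
// }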
/**
 * Creates a validated edge between two blocks.
 * Returns true if edge was created, false if skipped due to validation errors.
 */
export function createValidatedEdge(
  modifiedState: any,
  sourceBlockId: string,
  targetBlockId: string,
  sourceHandle: string,
  targetHandle: string,
  operationType: string,
  logger: ReturnType<typeof createLogger>,
  skippedItems?: SkippedItem[]
): boolean {
  if (!modifiedState.blocks[targetBlockId]) {
    logger.warn(`Target block "${targetBlockId}" not found. Edge skipped.`, {
      sourceBlockId,
      targetBlockId,
      sourceHandle,
    })
    skippedItems?.push({
      type: 'invalid_edge_target',
      operationType,
      blockId: sourceBlockId,
      reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - target block does not exist`,
      details: { sourceHandle, targetHandle, targetId: targetBlockId },
    })
    return false
  }

  const sourceBlock = modifiedState.blocks[sourceBlockId]
  if (!sourceBlock) {
    logger.warn(`Source block "${sourceBlockId}" not found. Edge skipped.`, {
      sourceBlockId,
      targetBlockId,
    })
    skippedItems?.push({
      type: 'invalid_edge_source',
      operationType,
      blockId: sourceBlockId,
      reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block does not exist`,
      details: { sourceHandle, targetHandle, targetId: targetBlockId },
    })
    return false
  }

  const sourceBlockType = sourceBlock.type
  if (!sourceBlockType) {
    logger.warn(`Source block "${sourceBlockId}" has no type. Edge skipped.`, {
      sourceBlockId,
      targetBlockId,
    })
    skippedItems?.push({
      type: 'invalid_edge_source',
      operationType,
      blockId: sourceBlockId,
      reason: `Edge from "${sourceBlockId}" to "${targetBlockId}" skipped - source block has no type`,
      details: { sourceHandle, targetHandle, targetId: targetBlockId },
    })
    return false
  }

  const sourceValidation = validateSourceHandleForBlock(sourceHandle, sourceBlockType, sourceBlock)
  if (!sourceValidation.valid) {
    logger.warn(`Invalid source handle. Edge skipped.`, {
      sourceBlockId,
      targetBlockId,
      sourceHandle,
      error: sourceValidation.error,
    })
    skippedItems?.push({
      type: 'invalid_source_handle',
      operationType,
      blockId: sourceBlockId,
      reason: sourceValidation.error || `Invalid source handle "${sourceHandle}"`,
      details: { sourceHandle, targetHandle, targetId: targetBlockId },
    })
    return false
  }

  const targetValidation = validateTargetHandle(targetHandle)
  if (!targetValidation.valid) {
    logger.warn(`Invalid target handle. Edge skipped.`, {
      sourceBlockId,
      targetBlockId,
      targetHandle,
      error: targetValidation.error,
    })
    skippedItems?.push({
      type: 'invalid_target_handle',
      operationType,
      blockId: sourceBlockId,
      reason: targetValidation.error || `Invalid target handle "${targetHandle}"`,
      details: { sourceHandle, targetHandle, targetId: targetBlockId },
    })
    return false
  }

  // Use normalized handle if available (e.g., 'if' -> 'condition-{uuid}')
  const finalSourceHandle = sourceValidation.normalizedHandle || sourceHandle

  modifiedState.edges.push({
    id: crypto.randomUUID(),
    source: sourceBlockId,
    sourceHandle: finalSourceHandle,
    target: targetBlockId,
    targetHandle,
    type: 'default',
  })
  return true
}

/**
 * Adds connections as edges for a block.
 * Supports multiple target formats:
 * - String: "target-block-id"
 * - Object: { block: "target-block-id", handle?: "custom-target-handle" }
 * - Array of strings or objects
 */
export function addConnectionsAsEdges(
  modifiedState: any,
  blockId: string,
  connections: Record<string, any>,
  logger: ReturnType<typeof createLogger>,
  skippedItems?: SkippedItem[]
): void {
  Object.entries(connections).forEach(([sourceHandle, targets]) => {
    if (targets === null) return

    const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
      createValidatedEdge(
        modifiedState,
        blockId,
        targetBlock,
        sourceHandle,
        targetHandle || 'target',
        'add_edge',
        logger,
        skippedItems
      )
    }

    if (typeof targets === 'string') {
      addEdgeForTarget(targets)
    } else if (Array.isArray(targets)) {
      targets.forEach((target: any) => {
        if (typeof target === 'string') {
          addEdgeForTarget(target)
        } else if (target?.block) {
          addEdgeForTarget(target.block, target.handle)
        }
      })
    } else if (typeof targets === 'object' && targets?.block) {
      addEdgeForTarget(targets.block, targets.handle)
    }
  })
}

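All three target shapes can appear in one connections map (illustrative block IDs, assuming modifiedState, logger, and skippedItems are in scope):

// Illustrative connections map exercising every accepted target shape.
addConnectionsAsEdges(
  modifiedState,
  'router-block-id',
  {
    source: 'plain-target-id', // string form
    'condition-true': { block: 'then-block-id', handle: 'target' }, // object form
    'condition-false': ['else-block-id', { block: 'log-block-id' }], // array form
  },
  logger,
  skippedItems
)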
export function applyTriggerConfigToBlockSubblocks(block: any, triggerConfig: Record<string, any>) {
  if (!block?.subBlocks || !triggerConfig || typeof triggerConfig !== 'object') {
    return
  }

  Object.entries(triggerConfig).forEach(([configKey, configValue]) => {
    const existingSubblock = block.subBlocks[configKey]
    if (existingSubblock) {
      const existingValue = existingSubblock.value
      const valuesEqual =
        typeof existingValue === 'object' || typeof configValue === 'object'
          ? JSON.stringify(existingValue) === JSON.stringify(configValue)
          : existingValue === configValue

      if (valuesEqual) {
        return
      }

      block.subBlocks[configKey] = {
        ...existingSubblock,
        value: configValue,
      }
    } else {
      block.subBlocks[configKey] = {
        id: configKey,
        type: 'short-input',
        value: configValue,
      }
    }
  })
}

/**
 * Filters out tools that are not allowed by the permission group config
 * Returns both the allowed tools and any skipped tool items for logging
 */
export function filterDisallowedTools(
  tools: any[],
  permissionConfig: PermissionGroupConfig | null,
  blockId: string,
  skippedItems: SkippedItem[]
): any[] {
  if (!permissionConfig) {
    return tools
  }

  const allowedTools: any[] = []

  for (const tool of tools) {
    if (tool.type === 'custom-tool' && permissionConfig.disableCustomTools) {
      logSkippedItem(skippedItems, {
        type: 'tool_not_allowed',
        operationType: 'add',
        blockId,
        reason: `Custom tool "${tool.title || tool.customToolId || 'unknown'}" is not allowed by permission group - tool not added`,
        details: { toolType: 'custom-tool', toolId: tool.customToolId },
      })
      continue
    }
    if (tool.type === 'mcp' && permissionConfig.disableMcpTools) {
      logSkippedItem(skippedItems, {
        type: 'tool_not_allowed',
        operationType: 'add',
        blockId,
        reason: `MCP tool "${tool.title || 'unknown'}" is not allowed by permission group - tool not added`,
        details: { toolType: 'mcp', serverId: tool.params?.serverId },
      })
      continue
    }
    allowedTools.push(tool)
  }

  return allowedTools
}

/**
 * Normalizes block IDs in operations to ensure they are valid UUIDs.
 * The LLM may generate human-readable IDs like "web_search" or "research_agent"
 * which need to be converted to proper UUIDs for database compatibility.
 *
 * Returns the normalized operations and a mapping from old IDs to new UUIDs.
 */
export function normalizeBlockIdsInOperations(operations: EditWorkflowOperation[]): {
  normalizedOperations: EditWorkflowOperation[]
  idMapping: Map<string, string>
} {
  const logger = createLogger('EditWorkflowServerTool')
  const idMapping = new Map<string, string>()

  // First pass: collect all non-UUID block_ids from add/insert operations
  for (const op of operations) {
    if (op.operation_type === 'add' || op.operation_type === 'insert_into_subflow') {
      if (op.block_id && !UUID_REGEX.test(op.block_id)) {
        const newId = crypto.randomUUID()
        idMapping.set(op.block_id, newId)
        logger.debug('Normalizing block ID', { oldId: op.block_id, newId })
      }
    }
  }

  if (idMapping.size === 0) {
    return { normalizedOperations: operations, idMapping }
  }

  logger.info('Normalizing block IDs in operations', {
    normalizedCount: idMapping.size,
    mappings: Object.fromEntries(idMapping),
  })

  // Helper to replace an ID if it's in the mapping
  const replaceId = (id: string | undefined): string | undefined => {
    if (!id) return id
    return idMapping.get(id) ?? id
  }

  // Second pass: update all references to use new UUIDs
  const normalizedOperations = operations.map((op) => {
    const normalized: EditWorkflowOperation = {
      ...op,
      block_id: replaceId(op.block_id) ?? op.block_id,
    }

    if (op.params) {
      normalized.params = { ...op.params }

      // Update subflowId references (for insert_into_subflow)
      if (normalized.params.subflowId) {
        normalized.params.subflowId = replaceId(normalized.params.subflowId)
      }

      // Update connection references
      if (normalized.params.connections) {
        const normalizedConnections: Record<string, any> = {}
        for (const [handle, targets] of Object.entries(normalized.params.connections)) {
          if (typeof targets === 'string') {
            normalizedConnections[handle] = replaceId(targets)
          } else if (Array.isArray(targets)) {
            normalizedConnections[handle] = targets.map((t) => {
              if (typeof t === 'string') return replaceId(t)
              if (t && typeof t === 'object' && t.block) {
                return { ...t, block: replaceId(t.block) }
              }
              return t
            })
          } else if (targets && typeof targets === 'object' && (targets as any).block) {
            normalizedConnections[handle] = { ...targets, block: replaceId((targets as any).block) }
          } else {
            normalizedConnections[handle] = targets
          }
        }
        normalized.params.connections = normalizedConnections
      }

      // Update nestedNodes block IDs
      if (normalized.params.nestedNodes) {
        const normalizedNestedNodes: Record<string, any> = {}
        for (const [childId, childBlock] of Object.entries(normalized.params.nestedNodes)) {
          const newChildId = replaceId(childId) ?? childId
          normalizedNestedNodes[newChildId] = childBlock
        }
        normalized.params.nestedNodes = normalizedNestedNodes
      }
    }

    return normalized
  })

  return { normalizedOperations, idMapping }
}
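For example, a human-readable ID from the model is rewritten consistently across its add operation and any connection that references it (hypothetical operations; the generated UUIDs are random):

// Illustrative: "web_search" is not a UUID, so both the block_id and the
// connection reference to it are rewritten with the same generated UUID.
const { normalizedOperations, idMapping } = normalizeBlockIdsInOperations([
  { operation_type: 'add', block_id: 'web_search', params: { type: 'api', name: 'Web Search' } },
  { operation_type: 'add', block_id: 'agent_1', params: { connections: { source: 'web_search' } } },
] as EditWorkflowOperation[])
// idMapping.get('web_search') === normalizedOperations[0].block_id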
@@ -0,0 +1,274 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
|
||||
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
|
||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||
import { addConnectionsAsEdges, normalizeBlockIdsInOperations } from './builders'
|
||||
import {
|
||||
handleAddOperation,
|
||||
handleDeleteOperation,
|
||||
handleEditOperation,
|
||||
handleExtractFromSubflowOperation,
|
||||
handleInsertIntoSubflowOperation,
|
||||
} from './operations'
|
||||
import type {
|
||||
ApplyOperationsResult,
|
||||
EditWorkflowOperation,
|
||||
OperationContext,
|
||||
ValidationError,
|
||||
} from './types'
|
||||
import { logSkippedItem, type SkippedItem } from './types'
|
||||
|
||||
const logger = createLogger('EditWorkflowServerTool')
|
||||
|
||||
type OperationHandler = (op: EditWorkflowOperation, ctx: OperationContext) => void
|
||||
|
||||
const OPERATION_HANDLERS: Record<EditWorkflowOperation['operation_type'], OperationHandler> = {
|
||||
delete: handleDeleteOperation,
|
||||
extract_from_subflow: handleExtractFromSubflowOperation,
|
||||
add: handleAddOperation,
|
||||
insert_into_subflow: handleInsertIntoSubflowOperation,
|
||||
edit: handleEditOperation,
|
||||
}
|
||||
|
||||
/**
|
||||
* Topologically sort insert operations to ensure parents are created before children
|
||||
* Returns sorted array where parent inserts always come before child inserts
|
||||
*/
|
||||
export function topologicalSortInserts(
|
||||
inserts: EditWorkflowOperation[],
|
||||
adds: EditWorkflowOperation[]
|
||||
): EditWorkflowOperation[] {
|
||||
if (inserts.length === 0) return []
|
||||
|
||||
// Build a map of blockId -> operation for quick lookup
|
||||
const insertMap = new Map<string, EditWorkflowOperation>()
|
||||
inserts.forEach((op) => insertMap.set(op.block_id, op))
|
||||
|
||||
// Build a set of blocks being added (potential parents)
|
||||
const addedBlocks = new Set(adds.map((op) => op.block_id))
|
||||
|
||||
// Build dependency graph: block -> blocks that depend on it
|
||||
const dependents = new Map<string, Set<string>>()
|
||||
const dependencies = new Map<string, Set<string>>()
|
||||
|
||||
inserts.forEach((op) => {
|
||||
const blockId = op.block_id
|
||||
const parentId = op.params?.subflowId
|
||||
|
||||
dependencies.set(blockId, new Set())
|
||||
|
||||
if (parentId) {
|
||||
      // Check whether the parent is being created in this same batch,
      // by either an insert or an add operation
      const parentBeingCreated = insertMap.has(parentId) || addedBlocks.has(parentId)

      if (parentBeingCreated) {
        // Only record a dependency when the parent is itself being inserted:
        // adds run before inserts, so an added parent already exists
        if (insertMap.has(parentId)) {
          dependencies.get(blockId)!.add(parentId)
          if (!dependents.has(parentId)) {
            dependents.set(parentId, new Set())
          }
          dependents.get(parentId)!.add(blockId)
        }
      }
    }
  })

  // Topological sort using Kahn's algorithm
  const sorted: EditWorkflowOperation[] = []
  const queue: string[] = []

  // Start with nodes that have no dependencies (or depend only on added blocks)
  inserts.forEach((op) => {
    const deps = dependencies.get(op.block_id)!
    if (deps.size === 0) {
      queue.push(op.block_id)
    }
  })

  while (queue.length > 0) {
    const blockId = queue.shift()!
    const op = insertMap.get(blockId)
    if (op) {
      sorted.push(op)
    }

    // Remove this node from dependencies of others
    const children = dependents.get(blockId)
    if (children) {
      children.forEach((childId) => {
        const childDeps = dependencies.get(childId)!
        childDeps.delete(blockId)
        if (childDeps.size === 0) {
          queue.push(childId)
        }
      })
    }
  }

  // If sorted length doesn't match input, there's a cycle (shouldn't happen with valid operations)
  // Just append remaining operations
  if (sorted.length < inserts.length) {
    inserts.forEach((op) => {
      if (!sorted.includes(op)) {
        sorted.push(op)
      }
    })
  }

  return sorted
}

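// Reviewer note - illustrative sketch, not part of this commit; block IDs are
// hypothetical. Two inserts where 'child-1' names 'container-1' as its subflow
// parent come back container-first, regardless of input order:
//
//   const sorted = topologicalSortInserts(
//     [
//       { operation_type: 'insert_into_subflow', block_id: 'child-1', params: { subflowId: 'container-1' } },
//       { operation_type: 'insert_into_subflow', block_id: 'container-1', params: {} },
//     ],
//     [] // no pending add operations
//   )
//   // sorted order: 'container-1', then 'child-1'
//
// Parents created by 'add' operations impose no ordering here, because adds
// are applied before any insert.
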
function orderOperations(operations: EditWorkflowOperation[]): EditWorkflowOperation[] {
  /**
   * Reorder operations to ensure correct execution sequence:
   * 1. delete - Remove blocks first to free up IDs and clean state
   * 2. extract_from_subflow - Extract blocks from subflows before modifications
   * 3. add - Create new blocks (sorted by connection dependencies)
   * 4. insert_into_subflow - Insert blocks into subflows (sorted by parent dependency)
   * 5. edit - Edit existing blocks last, so connections to newly added blocks work
   */
  const deletes = operations.filter((op) => op.operation_type === 'delete')
  const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')
  const adds = operations.filter((op) => op.operation_type === 'add')
  const inserts = operations.filter((op) => op.operation_type === 'insert_into_subflow')
  const edits = operations.filter((op) => op.operation_type === 'edit')

  // Sort insert operations to ensure parents are inserted before children
  const sortedInserts = topologicalSortInserts(inserts, adds)

  return [...deletes, ...extracts, ...adds, ...sortedInserts, ...edits]
}

/**
 * Apply operations directly to the workflow JSON state
 */
export function applyOperationsToWorkflowState(
  workflowState: Record<string, unknown>,
  operations: EditWorkflowOperation[],
  permissionConfig: PermissionGroupConfig | null = null
): ApplyOperationsResult {
  // Deep clone the workflow state to avoid mutations
  const modifiedState = JSON.parse(JSON.stringify(workflowState))

  // Collect validation errors across all operations
  const validationErrors: ValidationError[] = []

  // Collect skipped items across all operations
  const skippedItems: SkippedItem[] = []

  // Normalize block IDs to UUIDs before processing
  const { normalizedOperations } = normalizeBlockIdsInOperations(operations)

  // Order operations for deterministic application
  const orderedOperations = orderOperations(normalizedOperations)

  logger.info('Applying operations to workflow:', {
    totalOperations: orderedOperations.length,
    operationTypes: orderedOperations.reduce((acc: Record<string, number>, op) => {
      acc[op.operation_type] = (acc[op.operation_type] || 0) + 1
      return acc
    }, {}),
    initialBlockCount: Object.keys((modifiedState as any).blocks || {}).length,
  })

  const ctx: OperationContext = {
    modifiedState,
    skippedItems,
    validationErrors,
    permissionConfig,
    deferredConnections: [],
  }

  for (const operation of orderedOperations) {
    const { operation_type, block_id } = operation

    // CRITICAL: Validate block_id is a valid string and not "undefined"
    // This prevents undefined keys from being set in the workflow state
    if (!isValidKey(block_id)) {
      logSkippedItem(skippedItems, {
        type: 'missing_required_params',
        operationType: operation_type,
        blockId: String(block_id || 'invalid'),
        reason: `Invalid block_id "${block_id}" (type: ${typeof block_id}) - operation skipped. Block IDs must be valid non-empty strings.`,
      })
      logger.error('Invalid block_id detected in operation', {
        operation_type,
        block_id,
        block_id_type: typeof block_id,
      })
      continue
    }

    const handler = OPERATION_HANDLERS[operation_type]
    if (!handler) continue

    logger.debug(`Executing operation: ${operation_type} for block ${block_id}`, {
      params: operation.params ? Object.keys(operation.params) : [],
      currentBlockCount: Object.keys((modifiedState as any).blocks || {}).length,
    })

    handler(operation, ctx)
  }

  // Pass 2: Add all deferred connections from add/insert operations
  // Now all blocks exist, so connections can be safely created
  if (ctx.deferredConnections.length > 0) {
    logger.info('Processing deferred connections from add/insert operations', {
      deferredConnectionCount: ctx.deferredConnections.length,
      totalBlocks: Object.keys((modifiedState as any).blocks || {}).length,
    })

    for (const { blockId, connections } of ctx.deferredConnections) {
      // Verify the source block still exists (it might have been deleted by a later operation)
      if (!(modifiedState as any).blocks[blockId]) {
        logger.warn('Source block no longer exists for deferred connection', {
          blockId,
          availableBlocks: Object.keys((modifiedState as any).blocks || {}),
        })
        continue
      }

      addConnectionsAsEdges(modifiedState, blockId, connections, logger, skippedItems)
    }

    logger.info('Finished processing deferred connections', {
      totalEdges: (modifiedState as any).edges?.length,
    })
  }

  // Regenerate loops and parallels after modifications
  ;(modifiedState as any).loops = generateLoopBlocks((modifiedState as any).blocks)
  ;(modifiedState as any).parallels = generateParallelBlocks((modifiedState as any).blocks)

  // Validate all blocks have types before returning
  const blocksWithoutType = Object.entries((modifiedState as any).blocks || {})
    .filter(([_, block]: [string, any]) => !block.type || block.type === undefined)
    .map(([id, block]: [string, any]) => ({ id, block }))

  if (blocksWithoutType.length > 0) {
    logger.error('Blocks without type after operations:', {
      blocksWithoutType: blocksWithoutType.map(({ id, block }) => ({
        id,
        type: block.type,
        name: block.name,
        keys: Object.keys(block),
      })),
    })

    // Attempt to fix by removing type-less blocks
    blocksWithoutType.forEach(({ id }) => {
      delete (modifiedState as any).blocks[id]
    })

    // Remove edges connected to removed blocks
    const removedIds = new Set(blocksWithoutType.map(({ id }) => id))
    ;(modifiedState as any).edges = ((modifiedState as any).edges || []).filter(
      (edge: any) => !removedIds.has(edge.source) && !removedIds.has(edge.target)
    )
  }

  return { state: modifiedState, validationErrors, skippedItems }
}
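
// Reviewer note - illustrative sketch, not part of this commit; the operation
// payload is hypothetical. A minimal call site looks like:
//
//   const { state, validationErrors, skippedItems } = applyOperationsToWorkflowState(
//     currentState,
//     [{ operation_type: 'add', block_id: 'agent-1', params: { type: 'agent', name: 'Agent 1' } }]
//   )
//
// The input state is deep-cloned, so currentState is never mutated; rejected
// inputs surface in validationErrors and skipped operations in skippedItems.
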
@@ -0,0 +1,284 @@
import { db } from '@sim/db'
import { workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { applyAutoLayout } from '@/lib/workflows/autolayout'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import {
  loadWorkflowFromNormalizedTables,
  saveWorkflowToNormalizedTables,
} from '@/lib/workflows/persistence/utils'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { applyOperationsToWorkflowState } from './engine'
import type { EditWorkflowParams, ValidationError } from './types'
import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation'

async function getCurrentWorkflowStateFromDb(
  workflowId: string
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
  const logger = createLogger('EditWorkflowServerTool')
  const [workflowRecord] = await db
    .select()
    .from(workflowTable)
    .where(eq(workflowTable.id, workflowId))
    .limit(1)
  if (!workflowRecord) throw new Error(`Workflow ${workflowId} not found in database`)
  const normalized = await loadWorkflowFromNormalizedTables(workflowId)
  if (!normalized) throw new Error('Workflow has no normalized data')

  // Validate and fix blocks without types
  const blocks = { ...normalized.blocks }
  const invalidBlocks: string[] = []

  Object.entries(blocks).forEach(([id, block]: [string, any]) => {
    if (!block.type) {
      logger.warn(`Block ${id} loaded without type from database`, {
        blockKeys: Object.keys(block),
        blockName: block.name,
      })
      invalidBlocks.push(id)
    }
  })

  // Remove invalid blocks
  invalidBlocks.forEach((id) => delete blocks[id])

  // Remove edges connected to invalid blocks
  const edges = normalized.edges.filter(
    (edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
  )

  const workflowState: any = {
    blocks,
    edges,
    loops: normalized.loops || {},
    parallels: normalized.parallels || {},
  }
  const subBlockValues: Record<string, Record<string, any>> = {}
  Object.entries(normalized.blocks).forEach(([blockId, block]) => {
    subBlockValues[blockId] = {}
    Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => {
      if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value
    })
  })
  return { workflowState, subBlockValues }
}

export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
  name: 'edit_workflow',
  async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
    const logger = createLogger('EditWorkflowServerTool')
    const { operations, workflowId, currentUserWorkflow } = params
    if (!Array.isArray(operations) || operations.length === 0) {
      throw new Error('operations is required and must be a non-empty array')
    }
    if (!workflowId) throw new Error('workflowId is required')

    logger.info('Executing edit_workflow', {
      operationCount: operations.length,
      workflowId,
      hasCurrentUserWorkflow: !!currentUserWorkflow,
    })

    // Get current workflow state
    let workflowState: any
    if (currentUserWorkflow) {
      try {
        workflowState = JSON.parse(currentUserWorkflow)
      } catch (error) {
        logger.error('Failed to parse currentUserWorkflow', error)
        throw new Error('Invalid currentUserWorkflow format')
      }
    } else {
      const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
      workflowState = fromDb.workflowState
    }

    // Get permission config for the user
    const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null

    // Pre-validate credential and apiKey inputs before applying operations
    // This filters out invalid credentials and apiKeys for hosted models
    let operationsToApply = operations
    const credentialErrors: ValidationError[] = []
    if (context?.userId) {
      const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
        operations,
        { userId: context.userId },
        workflowState
      )
      operationsToApply = filteredOperations
      credentialErrors.push(...credErrors)
    }

    // Apply operations directly to the workflow state
    const {
      state: modifiedWorkflowState,
      validationErrors,
      skippedItems,
    } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)

    // Add credential validation errors
    validationErrors.push(...credentialErrors)

    // Get workspaceId for selector validation
    let workspaceId: string | undefined
    try {
      const [workflowRecord] = await db
        .select({ workspaceId: workflowTable.workspaceId })
        .from(workflowTable)
        .where(eq(workflowTable.id, workflowId))
        .limit(1)
      workspaceId = workflowRecord?.workspaceId ?? undefined
    } catch (error) {
      logger.warn('Failed to get workspaceId for selector validation', { error, workflowId })
    }

    // Validate selector IDs exist in the database
    if (context?.userId) {
      try {
        const selectorErrors = await validateWorkflowSelectorIds(modifiedWorkflowState, {
          userId: context.userId,
          workspaceId,
        })
        validationErrors.push(...selectorErrors)
      } catch (error) {
        logger.warn('Selector ID validation failed', {
          error: error instanceof Error ? error.message : String(error),
        })
      }
    }

    // Validate the workflow state
    const validation = validateWorkflowState(modifiedWorkflowState, { sanitize: true })

    if (!validation.valid) {
      logger.error('Edited workflow state is invalid', {
        errors: validation.errors,
        warnings: validation.warnings,
      })
      throw new Error(`Invalid edited workflow: ${validation.errors.join('; ')}`)
    }

    if (validation.warnings.length > 0) {
      logger.warn('Edited workflow validation warnings', {
        warnings: validation.warnings,
      })
    }

    // Extract and persist custom tools to database (reuse workspaceId from selector validation)
    if (context?.userId && workspaceId) {
      try {
        const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
        const { saved, errors } = await extractAndPersistCustomTools(
          finalWorkflowState,
          workspaceId,
          context.userId
        )

        if (saved > 0) {
          logger.info(`Persisted ${saved} custom tool(s) to database`, { workflowId })
        }

        if (errors.length > 0) {
          logger.warn('Some custom tools failed to persist', { errors, workflowId })
        }
      } catch (error) {
        logger.error('Failed to persist custom tools', { error, workflowId })
      }
    } else if (context?.userId && !workspaceId) {
      logger.warn('Workflow has no workspaceId, skipping custom tools persistence', {
        workflowId,
      })
    } else {
      logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
    }

    logger.info('edit_workflow successfully applied operations', {
      operationCount: operations.length,
      blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
      edgesCount: modifiedWorkflowState.edges.length,
      inputValidationErrors: validationErrors.length,
      skippedItemsCount: skippedItems.length,
      schemaValidationErrors: validation.errors.length,
      validationWarnings: validation.warnings.length,
    })

    // Format validation errors for LLM feedback
    const inputErrors =
      validationErrors.length > 0
        ? validationErrors.map((e) => `Block "${e.blockId}" (${e.blockType}): ${e.error}`)
        : undefined

    // Format skipped items for LLM feedback
    const skippedMessages =
      skippedItems.length > 0 ? skippedItems.map((item) => item.reason) : undefined

    // Persist the workflow state to the database
    const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState

    // Apply autolayout to position blocks properly
    const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
      horizontalSpacing: 250,
      verticalSpacing: 100,
      padding: { x: 100, y: 100 },
    })

    const layoutedBlocks =
      layoutResult.success && layoutResult.blocks ? layoutResult.blocks : finalWorkflowState.blocks

    if (!layoutResult.success) {
      logger.warn('Autolayout failed, using default positions', {
        workflowId,
        error: layoutResult.error,
      })
    }

    const workflowStateForDb = {
      blocks: layoutedBlocks,
      edges: finalWorkflowState.edges,
      loops: generateLoopBlocks(layoutedBlocks as any),
      parallels: generateParallelBlocks(layoutedBlocks as any),
      lastSaved: Date.now(),
      isDeployed: false,
    }

    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowStateForDb as any)
    if (!saveResult.success) {
      logger.error('Failed to persist workflow state to database', {
        workflowId,
        error: saveResult.error,
      })
      throw new Error(`Failed to save workflow: ${saveResult.error}`)
    }

    // Update workflow's lastSynced timestamp
    await db
      .update(workflowTable)
      .set({
        lastSynced: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(workflowTable.id, workflowId))

    logger.info('Workflow state persisted to database', { workflowId })

    // Return the modified workflow state with autolayout applied
    return {
      success: true,
      workflowState: { ...finalWorkflowState, blocks: layoutedBlocks },
      // Include input validation errors so the LLM can see what was rejected
      ...(inputErrors && {
        inputValidationErrors: inputErrors,
        inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`,
      }),
      // Include skipped items so the LLM can see what operations were skipped
      ...(skippedMessages && {
        skippedItems: skippedMessages,
        skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. Details: ${skippedMessages.join('; ')}`,
      }),
    }
  },
}
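
// Reviewer note - illustrative sketch, not part of this commit; the workflow
// ID, block ID, and user ID are hypothetical. Invoked as a server tool:
//
//   const result = await editWorkflowServerTool.execute(
//     {
//       workflowId: '9b2f1c04-0000-4000-8000-000000000000',
//       operations: [{ operation_type: 'delete', block_id: 'agent-1' }],
//     },
//     { userId: 'user-123' }
//   )
//
// On success the result carries the laid-out workflow state plus any
// inputValidationErrors / skippedItems messages intended for LLM feedback.
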
@@ -0,0 +1,996 @@
import { createLogger } from '@sim/logger'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getBlock } from '@/blocks/registry'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { RESERVED_BLOCK_NAMES, normalizeName } from '@/executor/constants'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import {
  addConnectionsAsEdges,
  applyTriggerConfigToBlockSubblocks,
  createBlockFromParams,
  createValidatedEdge,
  filterDisallowedTools,
  normalizeArrayWithIds,
  normalizeResponseFormat,
  normalizeTools,
  shouldNormalizeArrayIds,
  updateCanonicalModesForInputs,
} from './builders'
import type { EditWorkflowOperation, OperationContext } from './types'
import { logSkippedItem } from './types'
import {
  findBlockWithDuplicateNormalizedName,
  isBlockTypeAllowed,
  validateInputsForBlock,
} from './validation'

const logger = createLogger('EditWorkflowServerTool')

export function handleDeleteOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
  const { modifiedState, skippedItems } = ctx
  const { block_id } = op

  if (!modifiedState.blocks[block_id]) {
    logSkippedItem(skippedItems, {
      type: 'block_not_found',
      operationType: 'delete',
      blockId: block_id,
      reason: `Block "${block_id}" does not exist and cannot be deleted`,
    })
    return
  }

  // Check if block is locked or inside a locked container
  const deleteBlock = modifiedState.blocks[block_id]
  const deleteParentId = deleteBlock.data?.parentId as string | undefined
  const deleteParentLocked = deleteParentId ? modifiedState.blocks[deleteParentId]?.locked : false
  if (deleteBlock.locked || deleteParentLocked) {
    logSkippedItem(skippedItems, {
      type: 'block_locked',
      operationType: 'delete',
      blockId: block_id,
      reason: deleteParentLocked
        ? `Block "${block_id}" is inside locked container "${deleteParentId}" and cannot be deleted`
        : `Block "${block_id}" is locked and cannot be deleted`,
    })
    return
  }

  // Find all child blocks to remove
  const blocksToRemove = new Set<string>([block_id])
  const findChildren = (parentId: string) => {
    Object.entries(modifiedState.blocks).forEach(([childId, child]: [string, any]) => {
      if (child.data?.parentId === parentId) {
        blocksToRemove.add(childId)
        findChildren(childId)
      }
    })
  }
  findChildren(block_id)

  // Remove blocks
  blocksToRemove.forEach((id) => delete modifiedState.blocks[id])

  // Remove edges connected to deleted blocks
  modifiedState.edges = modifiedState.edges.filter(
    (edge: any) => !blocksToRemove.has(edge.source) && !blocksToRemove.has(edge.target)
  )
}

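// Reviewer note - illustrative sketch, not part of this commit; block IDs are
// hypothetical. Deleting a container cascades to its descendants:
//
//   handleDeleteOperation({ operation_type: 'delete', block_id: 'loop-1' }, ctx)
//
// This removes 'loop-1', every block whose data.parentId chain reaches
// 'loop-1', and all edges touching any removed block.
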
export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
  const { modifiedState, skippedItems, validationErrors, permissionConfig } = ctx
  const { block_id, params } = op

  if (!modifiedState.blocks[block_id]) {
    logSkippedItem(skippedItems, {
      type: 'block_not_found',
      operationType: 'edit',
      blockId: block_id,
      reason: `Block "${block_id}" does not exist and cannot be edited`,
    })
    return
  }

  const block = modifiedState.blocks[block_id]

  // Check if block is locked or inside a locked container
  const editParentId = block.data?.parentId as string | undefined
  const editParentLocked = editParentId ? modifiedState.blocks[editParentId]?.locked : false
  if (block.locked || editParentLocked) {
    logSkippedItem(skippedItems, {
      type: 'block_locked',
      operationType: 'edit',
      blockId: block_id,
      reason: editParentLocked
        ? `Block "${block_id}" is inside locked container "${editParentId}" and cannot be edited`
        : `Block "${block_id}" is locked and cannot be edited`,
    })
    return
  }

  // Ensure block has essential properties
  if (!block.type) {
    logger.warn(`Block ${block_id} missing type property, skipping edit`, {
      blockKeys: Object.keys(block),
      blockData: JSON.stringify(block),
    })
    logSkippedItem(skippedItems, {
      type: 'block_not_found',
      operationType: 'edit',
      blockId: block_id,
      reason: `Block "${block_id}" exists but has no type property`,
    })
    return
  }

  // Update inputs (convert to subBlocks format)
  if (params?.inputs) {
    if (!block.subBlocks) block.subBlocks = {}

    // Validate inputs against block configuration
    const validationResult = validateInputsForBlock(block.type, params.inputs, block_id)
    validationErrors.push(...validationResult.errors)

    Object.entries(validationResult.validInputs).forEach(([inputKey, value]) => {
      // Normalize common field name variations (LLM may use plural/singular inconsistently)
      let key = inputKey
      if (key === 'credentials' && !block.subBlocks.credentials && block.subBlocks.credential) {
        key = 'credential'
      }

      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) {
        return
      }
      let sanitizedValue = value

      // Normalize array subblocks with id fields (inputFormat, table rows, etc.)
      if (shouldNormalizeArrayIds(key)) {
        sanitizedValue = normalizeArrayWithIds(value)
      }

      // Special handling for tools - normalize and filter disallowed
      if (key === 'tools' && Array.isArray(value)) {
        sanitizedValue = filterDisallowedTools(
          normalizeTools(value),
          permissionConfig,
          block_id,
          skippedItems
        )
      }

      // Special handling for responseFormat - normalize to ensure consistent format
      if (key === 'responseFormat' && value) {
        sanitizedValue = normalizeResponseFormat(value)
      }

      if (!block.subBlocks[key]) {
        block.subBlocks[key] = {
          id: key,
          type: 'short-input',
          value: sanitizedValue,
        }
      } else {
        const existingValue = block.subBlocks[key].value
        const valuesEqual =
          typeof existingValue === 'object' || typeof sanitizedValue === 'object'
            ? JSON.stringify(existingValue) === JSON.stringify(sanitizedValue)
            : existingValue === sanitizedValue

        if (!valuesEqual) {
          block.subBlocks[key].value = sanitizedValue
        }
      }
    })

    if (
      Object.hasOwn(params.inputs, 'triggerConfig') &&
      block.subBlocks.triggerConfig &&
      typeof block.subBlocks.triggerConfig.value === 'object'
    ) {
      applyTriggerConfigToBlockSubblocks(block, block.subBlocks.triggerConfig.value)
    }

    // Update loop/parallel configuration in block.data (strict validation)
    if (block.type === 'loop') {
      block.data = block.data || {}
      // loopType is always valid
      if (params.inputs.loopType !== undefined) {
        const validLoopTypes = ['for', 'forEach', 'while', 'doWhile']
        if (validLoopTypes.includes(params.inputs.loopType)) {
          block.data.loopType = params.inputs.loopType
        }
      }
      const effectiveLoopType = params.inputs.loopType ?? block.data.loopType ?? 'for'
      // iterations only valid for 'for' loopType
      if (params.inputs.iterations !== undefined && effectiveLoopType === 'for') {
        block.data.count = params.inputs.iterations
      }
      // collection only valid for 'forEach' loopType
      if (params.inputs.collection !== undefined && effectiveLoopType === 'forEach') {
        block.data.collection = params.inputs.collection
      }
      // condition only valid for 'while' or 'doWhile' loopType
      if (
        params.inputs.condition !== undefined &&
        (effectiveLoopType === 'while' || effectiveLoopType === 'doWhile')
      ) {
        if (effectiveLoopType === 'doWhile') {
          block.data.doWhileCondition = params.inputs.condition
        } else {
          block.data.whileCondition = params.inputs.condition
        }
      }
    } else if (block.type === 'parallel') {
      block.data = block.data || {}
      // parallelType is always valid
      if (params.inputs.parallelType !== undefined) {
        const validParallelTypes = ['count', 'collection']
        if (validParallelTypes.includes(params.inputs.parallelType)) {
          block.data.parallelType = params.inputs.parallelType
        }
      }
      const effectiveParallelType = params.inputs.parallelType ?? block.data.parallelType ?? 'count'
      // count only valid for 'count' parallelType
      if (params.inputs.count !== undefined && effectiveParallelType === 'count') {
        block.data.count = params.inputs.count
      }
      // collection only valid for 'collection' parallelType
      if (params.inputs.collection !== undefined && effectiveParallelType === 'collection') {
        block.data.collection = params.inputs.collection
      }
    }

    const editBlockConfig = getBlock(block.type)
    if (editBlockConfig) {
      updateCanonicalModesForInputs(block, Object.keys(validationResult.validInputs), editBlockConfig)
    }
  }

  // Update basic properties
  if (params?.type !== undefined) {
    // Special container types (loop, parallel) are not in the block registry but are valid
    const isContainerType = params.type === 'loop' || params.type === 'parallel'

    // Validate type before setting (skip validation for container types)
    const blockConfig = getBlock(params.type)
    if (!blockConfig && !isContainerType) {
      logSkippedItem(skippedItems, {
        type: 'invalid_block_type',
        operationType: 'edit',
        blockId: block_id,
        reason: `Invalid block type "${params.type}" - type change skipped`,
        details: { requestedType: params.type },
      })
    } else if (!isContainerType && !isBlockTypeAllowed(params.type, permissionConfig)) {
      logSkippedItem(skippedItems, {
        type: 'block_not_allowed',
        operationType: 'edit',
        blockId: block_id,
        reason: `Block type "${params.type}" is not allowed by permission group - type change skipped`,
        details: { requestedType: params.type },
      })
    } else {
      block.type = params.type
    }
  }
  if (params?.name !== undefined) {
    const normalizedName = normalizeName(params.name)
    if (!normalizedName) {
      logSkippedItem(skippedItems, {
        type: 'missing_required_params',
        operationType: 'edit',
        blockId: block_id,
        reason: `Cannot rename to empty name`,
        details: { requestedName: params.name },
      })
    } else if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(normalizedName)) {
      logSkippedItem(skippedItems, {
        type: 'reserved_block_name',
        operationType: 'edit',
        blockId: block_id,
        reason: `Cannot rename to "${params.name}" - this is a reserved name`,
        details: { requestedName: params.name },
      })
    } else {
      const conflictingBlock = findBlockWithDuplicateNormalizedName(
        modifiedState.blocks,
        params.name,
        block_id
      )

      if (conflictingBlock) {
        logSkippedItem(skippedItems, {
          type: 'duplicate_block_name',
          operationType: 'edit',
          blockId: block_id,
          reason: `Cannot rename to "${params.name}" - conflicts with "${conflictingBlock[1].name}"`,
          details: {
            requestedName: params.name,
            conflictingBlockId: conflictingBlock[0],
            conflictingBlockName: conflictingBlock[1].name,
          },
        })
      } else {
        block.name = params.name
      }
    }
  }

  // Handle trigger mode toggle
  if (typeof params?.triggerMode === 'boolean') {
    block.triggerMode = params.triggerMode

    if (params.triggerMode === true) {
      // Remove all incoming edges when enabling trigger mode
      modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.target !== block_id)
    }
  }

  // Handle advanced mode toggle
  if (typeof params?.advancedMode === 'boolean') {
    block.advancedMode = params.advancedMode
  }

  // Handle nested nodes update (for loops/parallels)
  if (params?.nestedNodes) {
    // Remove all existing child blocks
    const existingChildren = Object.keys(modifiedState.blocks).filter(
      (id) => modifiedState.blocks[id].data?.parentId === block_id
    )
    existingChildren.forEach((childId) => delete modifiedState.blocks[childId])

    // Remove edges to/from removed children
    modifiedState.edges = modifiedState.edges.filter(
      (edge: any) => !existingChildren.includes(edge.source) && !existingChildren.includes(edge.target)
    )

    // Add new nested blocks
    Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
      // Validate childId is a valid string
      if (!isValidKey(childId)) {
        logSkippedItem(skippedItems, {
          type: 'missing_required_params',
          operationType: 'add_nested_node',
          blockId: String(childId || 'invalid'),
          reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
        })
        logger.error('Invalid childId detected in nestedNodes', {
          parentBlockId: block_id,
          childId,
          childId_type: typeof childId,
        })
        return
      }

      if (childBlock.type === 'loop' || childBlock.type === 'parallel') {
        logSkippedItem(skippedItems, {
          type: 'nested_subflow_not_allowed',
          operationType: 'edit_nested_node',
          blockId: childId,
          reason: `Cannot nest ${childBlock.type} inside ${block.type} - nested subflows are not supported`,
          details: { parentType: block.type, childType: childBlock.type },
        })
        return
      }

      const childBlockState = createBlockFromParams(
        childId,
        childBlock,
        block_id,
        validationErrors,
        permissionConfig,
        skippedItems
      )
      modifiedState.blocks[childId] = childBlockState

      // Add connections for child block
      if (childBlock.connections) {
        addConnectionsAsEdges(modifiedState, childId, childBlock.connections, logger, skippedItems)
      }
    })

    // Update loop/parallel configuration based on type (strict validation)
    if (block.type === 'loop') {
      block.data = block.data || {}
      // loopType is always valid
      if (params.inputs?.loopType) {
        const validLoopTypes = ['for', 'forEach', 'while', 'doWhile']
        if (validLoopTypes.includes(params.inputs.loopType)) {
          block.data.loopType = params.inputs.loopType
        }
      }
      const effectiveLoopType = params.inputs?.loopType ?? block.data.loopType ?? 'for'
      // iterations only valid for 'for' loopType
      if (params.inputs?.iterations && effectiveLoopType === 'for') {
        block.data.count = params.inputs.iterations
      }
      // collection only valid for 'forEach' loopType
      if (params.inputs?.collection && effectiveLoopType === 'forEach') {
        block.data.collection = params.inputs.collection
      }
      // condition only valid for 'while' or 'doWhile' loopType
      if (
        params.inputs?.condition &&
        (effectiveLoopType === 'while' || effectiveLoopType === 'doWhile')
      ) {
        if (effectiveLoopType === 'doWhile') {
          block.data.doWhileCondition = params.inputs.condition
        } else {
          block.data.whileCondition = params.inputs.condition
        }
      }
    } else if (block.type === 'parallel') {
      block.data = block.data || {}
      // parallelType is always valid
      if (params.inputs?.parallelType) {
        const validParallelTypes = ['count', 'collection']
        if (validParallelTypes.includes(params.inputs.parallelType)) {
          block.data.parallelType = params.inputs.parallelType
        }
      }
      const effectiveParallelType = params.inputs?.parallelType ?? block.data.parallelType ?? 'count'
      // count only valid for 'count' parallelType
      if (params.inputs?.count && effectiveParallelType === 'count') {
        block.data.count = params.inputs.count
      }
      // collection only valid for 'collection' parallelType
      if (params.inputs?.collection && effectiveParallelType === 'collection') {
        block.data.collection = params.inputs.collection
      }
    }
  }

  // Handle connections update (convert to edges)
  if (params?.connections) {
    modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)

    Object.entries(params.connections).forEach(([connectionType, targets]) => {
      if (targets === null) return

      const mapConnectionTypeToHandle = (type: string): string => {
        if (type === 'success') return 'source'
        if (type === 'error') return 'error'
        return type
      }

      const sourceHandle = mapConnectionTypeToHandle(connectionType)

      const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
        createValidatedEdge(
          modifiedState,
          block_id,
          targetBlock,
          sourceHandle,
          targetHandle || 'target',
          'edit',
          logger,
          skippedItems
        )
      }

      if (typeof targets === 'string') {
        addEdgeForTarget(targets)
      } else if (Array.isArray(targets)) {
        targets.forEach((target: any) => {
          if (typeof target === 'string') {
            addEdgeForTarget(target)
          } else if (target?.block) {
            addEdgeForTarget(target.block, target.handle)
          }
        })
      } else if (typeof targets === 'object' && (targets as any)?.block) {
        addEdgeForTarget((targets as any).block, (targets as any).handle)
      }
    })
  }

  // Handle edge removal
  if (params?.removeEdges && Array.isArray(params.removeEdges)) {
    params.removeEdges.forEach(({ targetBlockId, sourceHandle = 'source' }) => {
      modifiedState.edges = modifiedState.edges.filter(
        (edge: any) =>
          !(edge.source === block_id && edge.target === targetBlockId && edge.sourceHandle === sourceHandle)
      )
    })
  }
}

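// Reviewer note - illustrative sketch, not part of this commit; IDs and values
// are hypothetical. A typical edit updates one input and rewires the outgoing
// connections:
//
//   handleEditOperation(
//     {
//       operation_type: 'edit',
//       block_id: 'agent-1',
//       params: {
//         inputs: { systemPrompt: 'You are concise.' },
//         connections: { success: 'responder-1' },
//       },
//     },
//     ctx
//   )
//
// Because params.connections is present, every existing edge sourced at
// 'agent-1' is dropped and rebuilt through createValidatedEdge.
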
export function handleAddOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
  const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } = ctx
  const { block_id, params } = op

  const addNormalizedName = params?.name ? normalizeName(params.name) : ''
  if (!params?.type || !params?.name || !addNormalizedName) {
    logSkippedItem(skippedItems, {
      type: 'missing_required_params',
      operationType: 'add',
      blockId: block_id,
      reason: `Missing required params (type or name) for adding block "${block_id}"`,
      details: { hasType: !!params?.type, hasName: !!params?.name },
    })
    return
  }

  if ((RESERVED_BLOCK_NAMES as readonly string[]).includes(addNormalizedName)) {
    logSkippedItem(skippedItems, {
      type: 'reserved_block_name',
      operationType: 'add',
      blockId: block_id,
      reason: `Block name "${params.name}" is a reserved name and cannot be used`,
      details: { requestedName: params.name },
    })
    return
  }

  const conflictingBlock = findBlockWithDuplicateNormalizedName(modifiedState.blocks, params.name, block_id)

  if (conflictingBlock) {
    logSkippedItem(skippedItems, {
      type: 'duplicate_block_name',
      operationType: 'add',
      blockId: block_id,
      reason: `Block name "${params.name}" conflicts with existing block "${conflictingBlock[1].name}"`,
      details: {
        requestedName: params.name,
        conflictingBlockId: conflictingBlock[0],
        conflictingBlockName: conflictingBlock[1].name,
      },
    })
    return
  }

  // Special container types (loop, parallel) are not in the block registry but are valid
  const isContainerType = params.type === 'loop' || params.type === 'parallel'

  // Validate block type before adding (skip validation for container types)
  const addBlockConfig = getBlock(params.type)
  if (!addBlockConfig && !isContainerType) {
    logSkippedItem(skippedItems, {
      type: 'invalid_block_type',
      operationType: 'add',
      blockId: block_id,
      reason: `Invalid block type "${params.type}" - block not added`,
      details: { requestedType: params.type },
    })
    return
  }

  // Check if block type is allowed by permission group
  if (!isContainerType && !isBlockTypeAllowed(params.type, permissionConfig)) {
    logSkippedItem(skippedItems, {
      type: 'block_not_allowed',
      operationType: 'add',
      blockId: block_id,
      reason: `Block type "${params.type}" is not allowed by permission group - block not added`,
      details: { requestedType: params.type },
    })
    return
  }

  const triggerIssue = TriggerUtils.getTriggerAdditionIssue(modifiedState.blocks, params.type)
  if (triggerIssue) {
    logSkippedItem(skippedItems, {
      type: 'duplicate_trigger',
      operationType: 'add',
      blockId: block_id,
      reason: `Cannot add ${triggerIssue.triggerName} - a workflow can only have one`,
      details: { requestedType: params.type, issue: triggerIssue.issue },
    })
    return
  }

  // Check single-instance block constraints (e.g., Response block)
  const singleInstanceIssue = TriggerUtils.getSingleInstanceBlockIssue(modifiedState.blocks, params.type)
  if (singleInstanceIssue) {
    logSkippedItem(skippedItems, {
      type: 'duplicate_single_instance_block',
      operationType: 'add',
      blockId: block_id,
      reason: `Cannot add ${singleInstanceIssue.blockName} - a workflow can only have one`,
      details: { requestedType: params.type },
    })
    return
  }

  // Create new block with proper structure
  const newBlock = createBlockFromParams(
    block_id,
    params,
    undefined,
    validationErrors,
    permissionConfig,
    skippedItems
  )

  // Set loop/parallel data on parent block BEFORE adding to blocks (strict validation)
  if (params.nestedNodes) {
    if (params.type === 'loop') {
      const validLoopTypes = ['for', 'forEach', 'while', 'doWhile']
      const loopType =
        params.inputs?.loopType && validLoopTypes.includes(params.inputs.loopType)
          ? params.inputs.loopType
          : 'for'
      newBlock.data = {
        ...newBlock.data,
        loopType,
        // Only include type-appropriate fields
        ...(loopType === 'forEach' && params.inputs?.collection && { collection: params.inputs.collection }),
        ...(loopType === 'for' && params.inputs?.iterations && { count: params.inputs.iterations }),
        ...(loopType === 'while' && params.inputs?.condition && { whileCondition: params.inputs.condition }),
        ...(loopType === 'doWhile' &&
          params.inputs?.condition && { doWhileCondition: params.inputs.condition }),
      }
    } else if (params.type === 'parallel') {
      const validParallelTypes = ['count', 'collection']
      const parallelType =
        params.inputs?.parallelType && validParallelTypes.includes(params.inputs.parallelType)
          ? params.inputs.parallelType
          : 'count'
      newBlock.data = {
        ...newBlock.data,
        parallelType,
        // Only include type-appropriate fields
        ...(parallelType === 'collection' &&
          params.inputs?.collection && { collection: params.inputs.collection }),
        ...(parallelType === 'count' && params.inputs?.count && { count: params.inputs.count }),
      }
    }
  }

  // Add parent block FIRST before adding children
  // This ensures children can reference valid parentId
  modifiedState.blocks[block_id] = newBlock

  // Handle nested nodes (for loops/parallels created from scratch)
  if (params.nestedNodes) {
    // Defensive check: verify parent is not locked before adding children
    // (Parent was just created with locked: false, but check for consistency)
    const parentBlock = modifiedState.blocks[block_id]
    if (parentBlock?.locked) {
      logSkippedItem(skippedItems, {
        type: 'block_locked',
        operationType: 'add_nested_nodes',
        blockId: block_id,
        reason: `Container "${block_id}" is locked - cannot add nested nodes`,
      })
      return
    }

    Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
      // Validate childId is a valid string
      if (!isValidKey(childId)) {
        logSkippedItem(skippedItems, {
          type: 'missing_required_params',
          operationType: 'add_nested_node',
          blockId: String(childId || 'invalid'),
          reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
        })
        logger.error('Invalid childId detected in nestedNodes', {
          parentBlockId: block_id,
          childId,
          childId_type: typeof childId,
        })
        return
      }

      if (childBlock.type === 'loop' || childBlock.type === 'parallel') {
        logSkippedItem(skippedItems, {
          type: 'nested_subflow_not_allowed',
          operationType: 'add_nested_node',
          blockId: childId,
          reason: `Cannot nest ${childBlock.type} inside ${params.type} - nested subflows are not supported`,
          details: { parentType: params.type, childType: childBlock.type },
        })
        return
      }

      const childBlockState = createBlockFromParams(
        childId,
        childBlock,
        block_id,
        validationErrors,
        permissionConfig,
        skippedItems
      )
      modifiedState.blocks[childId] = childBlockState

      // Defer connection processing to ensure all blocks exist first
      if (childBlock.connections) {
        deferredConnections.push({
          blockId: childId,
          connections: childBlock.connections,
        })
      }
    })
  }

  // Defer connection processing to ensure all blocks exist first (pass 2)
  if (params.connections) {
    deferredConnections.push({
      blockId: block_id,
      connections: params.connections,
    })
  }
}

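// Reviewer note - illustrative sketch, not part of this commit; IDs are
// hypothetical. Adding a loop with one nested child defers the child's edge
// creation to pass 2 of the engine:
//
//   handleAddOperation(
//     {
//       operation_type: 'add',
//       block_id: 'loop-1',
//       params: {
//         type: 'loop',
//         name: 'Retry loop',
//         inputs: { loopType: 'for', iterations: 3 },
//         nestedNodes: { 'child-1': { type: 'function', name: 'Step' } },
//       },
//     },
//     ctx
//   )
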
export function handleInsertIntoSubflowOperation(
  op: EditWorkflowOperation,
  ctx: OperationContext
): void {
  const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } = ctx
  const { block_id, params } = op

  const subflowId = params?.subflowId
  if (!subflowId || !params?.type || !params?.name) {
    logSkippedItem(skippedItems, {
      type: 'missing_required_params',
      operationType: 'insert_into_subflow',
      blockId: block_id,
      reason: `Missing required params (subflowId, type, or name) for inserting block "${block_id}"`,
      details: {
        hasSubflowId: !!subflowId,
        hasType: !!params?.type,
        hasName: !!params?.name,
      },
    })
    return
  }

  const subflowBlock = modifiedState.blocks[subflowId]
  if (!subflowBlock) {
    logSkippedItem(skippedItems, {
      type: 'invalid_subflow_parent',
      operationType: 'insert_into_subflow',
      blockId: block_id,
      reason: `Subflow block "${subflowId}" not found - block "${block_id}" not inserted`,
      details: { subflowId },
    })
    return
  }

  // Check if subflow is locked
  if (subflowBlock.locked) {
    logSkippedItem(skippedItems, {
      type: 'block_locked',
      operationType: 'insert_into_subflow',
      blockId: block_id,
      reason: `Subflow "${subflowId}" is locked - cannot insert block "${block_id}"`,
      details: { subflowId },
    })
    return
  }

  if (subflowBlock.type !== 'loop' && subflowBlock.type !== 'parallel') {
    logger.error('Subflow block has invalid type', {
      subflowId,
      type: subflowBlock.type,
      block_id,
    })
    return
  }

  if (params.type === 'loop' || params.type === 'parallel') {
    logSkippedItem(skippedItems, {
      type: 'nested_subflow_not_allowed',
      operationType: 'insert_into_subflow',
      blockId: block_id,
      reason: `Cannot nest ${params.type} inside ${subflowBlock.type} - nested subflows are not supported`,
      details: { parentType: subflowBlock.type, childType: params.type },
    })
    return
  }

  // Check if block already exists (moving into subflow) or is new
  const existingBlock = modifiedState.blocks[block_id]

  if (existingBlock) {
    if (existingBlock.type === 'loop' || existingBlock.type === 'parallel') {
      logSkippedItem(skippedItems, {
        type: 'nested_subflow_not_allowed',
        operationType: 'insert_into_subflow',
        blockId: block_id,
        reason: `Cannot move ${existingBlock.type} into ${subflowBlock.type} - nested subflows are not supported`,
        details: { parentType: subflowBlock.type, childType: existingBlock.type },
      })
      return
    }

    // Check if existing block is locked
    if (existingBlock.locked) {
      logSkippedItem(skippedItems, {
        type: 'block_locked',
        operationType: 'insert_into_subflow',
        blockId: block_id,
        reason: `Block "${block_id}" is locked and cannot be moved into a subflow`,
      })
      return
    }

    // Moving existing block into subflow - just update parent
    existingBlock.data = {
      ...existingBlock.data,
      parentId: subflowId,
      extent: 'parent' as const,
    }

    // Update inputs if provided (with validation)
    if (params.inputs) {
      // Validate inputs against block configuration
      const validationResult = validateInputsForBlock(existingBlock.type, params.inputs, block_id)
      validationErrors.push(...validationResult.errors)

      Object.entries(validationResult.validInputs).forEach(([key, value]) => {
        // Skip runtime subblock IDs (webhookId, triggerPath)
        if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) {
          return
        }

        let sanitizedValue = value

        // Normalize array subblocks with id fields (inputFormat, table rows, etc.)
        if (shouldNormalizeArrayIds(key)) {
          sanitizedValue = normalizeArrayWithIds(value)
        }

        // Special handling for tools - normalize and filter disallowed
        if (key === 'tools' && Array.isArray(value)) {
          sanitizedValue = filterDisallowedTools(
            normalizeTools(value),
            permissionConfig,
            block_id,
            skippedItems
          )
        }

        // Special handling for responseFormat - normalize to ensure consistent format
        if (key === 'responseFormat' && value) {
          sanitizedValue = normalizeResponseFormat(value)
        }

        if (!existingBlock.subBlocks[key]) {
          existingBlock.subBlocks[key] = {
            id: key,
            type: 'short-input',
            value: sanitizedValue,
          }
        } else {
          existingBlock.subBlocks[key].value = sanitizedValue
        }
      })

      const existingBlockConfig = getBlock(existingBlock.type)
      if (existingBlockConfig) {
        updateCanonicalModesForInputs(
          existingBlock,
          Object.keys(validationResult.validInputs),
          existingBlockConfig
        )
      }
    }
  } else {
    // Special container types (loop, parallel) are not in the block registry but are valid
    const isContainerType = params.type === 'loop' || params.type === 'parallel'

    // Validate block type before creating (skip validation for container types)
    const insertBlockConfig = getBlock(params.type)
    if (!insertBlockConfig && !isContainerType) {
      logSkippedItem(skippedItems, {
        type: 'invalid_block_type',
        operationType: 'insert_into_subflow',
        blockId: block_id,
        reason: `Invalid block type "${params.type}" - block not inserted into subflow`,
        details: { requestedType: params.type, subflowId },
      })
      return
    }

    // Check if block type is allowed by permission group
    if (!isContainerType && !isBlockTypeAllowed(params.type, permissionConfig)) {
      logSkippedItem(skippedItems, {
        type: 'block_not_allowed',
        operationType: 'insert_into_subflow',
        blockId: block_id,
        reason: `Block type "${params.type}" is not allowed by permission group - block not inserted`,
        details: { requestedType: params.type, subflowId },
      })
      return
    }

    // Create new block as child of subflow
    const newBlock = createBlockFromParams(
      block_id,
      params,
      subflowId,
      validationErrors,
      permissionConfig,
      skippedItems
    )
    modifiedState.blocks[block_id] = newBlock
  }

  // Defer connection processing to ensure all blocks exist first
  // This is particularly important when multiple blocks are being inserted
  // and they have connections to each other
  if (params.connections) {
    // Remove existing edges from this block first
    modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)

    // Add to deferred connections list
    deferredConnections.push({
      blockId: block_id,
      connections: params.connections,
    })
  }
}

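// Reviewer note - illustrative sketch, not part of this commit; IDs are
// hypothetical. Moving an existing block into a container needs the target
// subflow plus the block's type and name:
//
//   handleInsertIntoSubflowOperation(
//     {
//       operation_type: 'insert_into_subflow',
//       block_id: 'step-1',
//       params: { subflowId: 'loop-1', type: 'function', name: 'Step 1' },
//     },
//     ctx
//   )
//
// If 'step-1' already exists it is reparented (data.parentId = 'loop-1',
// extent = 'parent'); otherwise a new child block is created under 'loop-1'.
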
export function handleExtractFromSubflowOperation(
  op: EditWorkflowOperation,
  ctx: OperationContext
): void {
  const { modifiedState, skippedItems } = ctx
  const { block_id, params } = op

  const subflowId = params?.subflowId
  if (!subflowId) {
    logSkippedItem(skippedItems, {
      type: 'missing_required_params',
      operationType: 'extract_from_subflow',
      blockId: block_id,
      reason: `Missing subflowId for extracting block "${block_id}"`,
    })
    return
  }

  const block = modifiedState.blocks[block_id]
  if (!block) {
    logSkippedItem(skippedItems, {
      type: 'block_not_found',
      operationType: 'extract_from_subflow',
      blockId: block_id,
      reason: `Block "${block_id}" not found for extraction`,
    })
    return
  }

  // Check if block is locked
  if (block.locked) {
    logSkippedItem(skippedItems, {
      type: 'block_locked',
      operationType: 'extract_from_subflow',
      blockId: block_id,
      reason: `Block "${block_id}" is locked and cannot be extracted from subflow`,
    })
    return
  }

  // Check if parent subflow is locked
  const parentSubflow = modifiedState.blocks[subflowId]
  if (parentSubflow?.locked) {
    logSkippedItem(skippedItems, {
      type: 'block_locked',
      operationType: 'extract_from_subflow',
      blockId: block_id,
      reason: `Subflow "${subflowId}" is locked - cannot extract block "${block_id}"`,
      details: { subflowId },
    })
    return
  }

  // Verify it's actually a child of this subflow
  if (block.data?.parentId !== subflowId) {
    logger.warn('Block is not a child of specified subflow', {
      block_id,
      actualParent: block.data?.parentId,
      specifiedParent: subflowId,
    })
  }

  // Remove parent relationship
  if (block.data) {
    block.data.parentId = undefined
    block.data.extent = undefined
  }

  // Note: We keep the block and its edges, just remove parent relationship
  // The block becomes a root-level block
}
@@ -0,0 +1,134 @@
import { createLogger } from '@sim/logger'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'

/** Selector subblock types that can be validated */
export const SELECTOR_TYPES = new Set([
  'oauth-input',
  'knowledge-base-selector',
  'document-selector',
  'file-selector',
  'project-selector',
  'channel-selector',
  'folder-selector',
  'mcp-server-selector',
  'mcp-tool-selector',
  'workflow-selector',
])

const validationLogger = createLogger('EditWorkflowValidation')

/** UUID v4 regex pattern for validation */
|
||||
export const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
|
||||
|
||||
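// Illustrative only: a workflow-selector value is expected to be a workflow
// UUID, so SELECTOR_TYPES and UUID_REGEX can be combined as a quick screen.
// Other selector types may hold provider-specific IDs, so this check is a
// sketch, not the tool's actual validation logic.
function isPlausibleWorkflowSelectorValue(subBlockType: string, value: unknown): boolean {
  return (
    SELECTOR_TYPES.has(subBlockType) &&
    subBlockType === 'workflow-selector' &&
    typeof value === 'string' &&
    UUID_REGEX.test(value)
  )
}
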
/**
 * Validation error for a specific field
 */
export interface ValidationError {
  blockId: string
  blockType: string
  field: string
  value: any
  error: string
}

/**
 * Types of items that can be skipped during operation application
 */
export type SkippedItemType =
  | 'block_not_found'
  | 'invalid_block_type'
  | 'block_not_allowed'
  | 'block_locked'
  | 'tool_not_allowed'
  | 'invalid_edge_target'
  | 'invalid_edge_source'
  | 'invalid_source_handle'
  | 'invalid_target_handle'
  | 'invalid_subblock_field'
  | 'missing_required_params'
  | 'invalid_subflow_parent'
  | 'nested_subflow_not_allowed'
  | 'duplicate_block_name'
  | 'reserved_block_name'
  | 'duplicate_trigger'
  | 'duplicate_single_instance_block'

/**
 * Represents an item that was skipped during operation application
 */
export interface SkippedItem {
  type: SkippedItemType
  operationType: string
  blockId: string
  reason: string
  details?: Record<string, any>
}

/**
 * Logs and records a skipped item
 */
export function logSkippedItem(skippedItems: SkippedItem[], item: SkippedItem): void {
  validationLogger.warn(`Skipped ${item.operationType} operation: ${item.reason}`, {
    type: item.type,
    operationType: item.operationType,
    blockId: item.blockId,
    ...(item.details && { details: item.details }),
  })
  skippedItems.push(item)
}

/**
 * Result of input validation
 */
export interface ValidationResult {
  validInputs: Record<string, any>
  errors: ValidationError[]
}

/**
 * Result of validating a single value
 */
export interface ValueValidationResult {
  valid: boolean
  value?: any
  error?: ValidationError
}

export interface EditWorkflowOperation {
  operation_type: 'add' | 'edit' | 'delete' | 'insert_into_subflow' | 'extract_from_subflow'
  block_id: string
  params?: Record<string, any>
}

export interface EditWorkflowParams {
  operations: EditWorkflowOperation[]
  workflowId: string
  currentUserWorkflow?: string
}

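// For illustration, a payload conforming to these interfaces. The IDs and the
// per-operation params are invented for the example; real params depend on
// the block type being added.
const exampleParams: EditWorkflowParams = {
  workflowId: '11111111-2222-3333-4444-555555555555',
  operations: [
    // Add a new block first...
    { operation_type: 'add', block_id: 'agent-1', params: { type: 'agent', name: 'Agent 1' } },
    // ...then re-parent it into an existing subflow.
    { operation_type: 'insert_into_subflow', block_id: 'agent-1', params: { subflowId: 'loop-1' } },
  ],
}
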
export interface EdgeHandleValidationResult {
  valid: boolean
  error?: string
  /** The normalized handle to use (e.g., simple 'if' normalized to 'condition-{uuid}') */
  normalizedHandle?: string
}

/**
 * Result of applying operations to workflow state
 */
export interface ApplyOperationsResult {
  state: any
  validationErrors: ValidationError[]
  skippedItems: SkippedItem[]
}

export interface OperationContext {
  modifiedState: any
  skippedItems: SkippedItem[]
  validationErrors: ValidationError[]
  permissionConfig: PermissionGroupConfig | null
  deferredConnections: Array<{
    blockId: string
    connections: Record<string, any>
  }>
}
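
// A sketch of how a caller might surface the non-fatal feedback carried by
// ApplyOperationsResult; the message formatting here is invented for
// illustration.
function summarizeApplyResult(result: ApplyOperationsResult): string[] {
  const messages: string[] = []
  for (const err of result.validationErrors) {
    messages.push(`Invalid ${err.field} on ${err.blockType} "${err.blockId}": ${err.error}`)
  }
  for (const item of result.skippedItems) {
    messages.push(`Skipped ${item.operationType} on "${item.blockId}": ${item.reason}`)
  }
  return messages
}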
File diff suppressed because it is too large
@@ -96,6 +96,7 @@ function normalizeErrorMessage(errorValue: unknown): string | undefined {
  try {
    return String(errorValue)
  } catch {
    // String() conversion failed for the error value; fall back to undefined
    return undefined
  }
}

apps/sim/lib/workflows/blocks/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export { BlockSchemaResolver, blockSchemaResolver } from './schema-resolver'
export type { ResolvedBlock, ResolvedSubBlock, ResolvedOption, ResolvedOutput } from './schema-types'
apps/sim/lib/workflows/blocks/schema-resolver.ts (new file, 201 lines)
@@ -0,0 +1,201 @@
import { createLogger } from '@sim/logger'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import type { ResolvedBlock, ResolvedOption, ResolvedOutput, ResolvedSubBlock } from './schema-types'

const logger = createLogger('BlockSchemaResolver')

/**
 * BlockSchemaResolver provides typed access to block configurations.
 *
 * It wraps the raw block registry and returns resolved, typed schemas
 * that consumers can use without any type assertions.
 */
export class BlockSchemaResolver {
  private cache = new Map<string, ResolvedBlock>()

  /** Resolve a single block by type */
  resolveBlock(type: string): ResolvedBlock | null {
    const cached = this.cache.get(type)
    if (cached) return cached

    const config = getBlock(type)
    if (!config) return null

    const resolved = this.buildResolvedBlock(config)
    this.cache.set(type, resolved)
    return resolved
  }

  /** Resolve all available blocks */
  resolveAllBlocks(options?: { includeHidden?: boolean }): ResolvedBlock[] {
    const configs = getAllBlocks()
    return configs
      .filter((config) => options?.includeHidden || !config.hideFromToolbar)
      .map((config) => this.resolveBlock(config.type))
      .filter((block): block is ResolvedBlock => block !== null)
  }

  /** Clear the cache (call when block registry changes) */
  clearCache(): void {
    this.cache.clear()
  }

  private buildResolvedBlock(config: BlockConfig): ResolvedBlock {
    return {
      type: config.type,
      name: config.name,
      description: config.description,
      category: config.category,
      icon: config.icon as unknown as ResolvedBlock['icon'],
      isTrigger: this.isTriggerBlock(config),
      hideFromToolbar: config.hideFromToolbar ?? false,
      subBlocks: config.subBlocks.map((subBlock) => this.resolveSubBlock(subBlock)),
      outputs: this.resolveOutputs(config),
      supportsTriggerMode: this.supportsTriggerMode(config),
      hasAdvancedMode: config.subBlocks.some((subBlock) => subBlock.mode === 'advanced'),
      raw: config,
    }
  }

  private resolveSubBlock(sb: SubBlockConfig): ResolvedSubBlock {
    const resolved: ResolvedSubBlock = {
      id: sb.id,
      type: sb.type,
      label: sb.title,
      placeholder: sb.placeholder,
      required: typeof sb.required === 'boolean' ? sb.required : undefined,
      password: sb.password,
      hasCondition: Boolean(sb.condition),
      defaultValue: sb.defaultValue,
      validation: {
        min: sb.min,
        max: sb.max,
        pattern: this.resolvePattern(sb),
      },
    }

    const condition = this.resolveCondition(sb)
    if (condition) {
      resolved.condition = condition
    }

    const options = this.resolveOptions(sb)
    if (options.length > 0) {
      resolved.options = options
    }

    // Use explicit undefined checks so that legitimate zero bounds
    // (e.g. min: 0) do not cause the validation object to be dropped.
    if (
      resolved.validation?.min === undefined &&
      resolved.validation?.max === undefined &&
      resolved.validation?.pattern === undefined
    ) {
      delete resolved.validation
    }

    return resolved
  }

  private resolveCondition(sb: SubBlockConfig): ResolvedSubBlock['condition'] | undefined {
    try {
      const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
      if (!condition || typeof condition !== 'object') {
        return undefined
      }

      return {
        field: String(condition.field),
        value: condition.value,
      }
    } catch (error) {
      logger.warn('Failed to resolve sub-block condition', {
        subBlockId: sb.id,
        error: error instanceof Error ? error.message : String(error),
      })
      return undefined
    }
  }

  private resolveOptions(sb: SubBlockConfig): ResolvedOption[] {
    try {
      if (Array.isArray(sb.options)) {
        return sb.options.map((opt) => {
          if (typeof opt === 'string') {
            return { label: opt, value: opt }
          }

          const label = String(opt.label || opt.id || '')
          const value = String(opt.id || opt.label || '')

          return {
            label,
            value,
            id: opt.id,
          }
        })
      }

      // For function-based or dynamic options, return empty.
      // Consumers can evaluate these options if they need runtime resolution.
      return []
    } catch (error) {
      logger.warn('Failed to resolve sub-block options', {
        subBlockId: sb.id,
        error: error instanceof Error ? error.message : String(error),
      })
      return []
    }
  }

  private resolveOutputs(config: BlockConfig): ResolvedOutput[] {
    try {
      // eslint-disable-next-line @typescript-eslint/no-require-imports
      const blockOutputs = require('@/lib/workflows/blocks/block-outputs') as {
        getBlockOutputPaths: (
          blockType: string,
          subBlocks?: Record<string, unknown>,
          triggerMode?: boolean
        ) => string[]
      }

      const paths = blockOutputs.getBlockOutputPaths(config.type, {}, false)
      return paths.map((path) => ({
        name: path,
        type: 'string',
      }))
    } catch (error) {
      logger.warn('Failed to resolve block outputs, using fallback', {
        blockType: config.type,
        error: error instanceof Error ? error.message : String(error),
      })
      return [{ name: 'result', type: 'string' }]
    }
  }

  private isTriggerBlock(config: BlockConfig): boolean {
    try {
      // eslint-disable-next-line @typescript-eslint/no-require-imports
      const triggerUtils = require('@/lib/workflows/triggers/input-definition-triggers') as {
        isInputDefinitionTrigger: (blockType: string) => boolean
      }
      return triggerUtils.isInputDefinitionTrigger(config.type)
    } catch (error) {
      logger.warn('Failed to detect trigger block, using fallback', {
        blockType: config.type,
        error: error instanceof Error ? error.message : String(error),
      })
      return config.type === 'starter'
    }
  }

  private supportsTriggerMode(config: BlockConfig): boolean {
    return Boolean(
      config.triggerAllowed ||
        config.subBlocks.some((subBlock) => subBlock.id === 'triggerMode' || subBlock.mode === 'trigger')
    )
  }

  private resolvePattern(sb: SubBlockConfig): string | undefined {
    const maybePattern = (sb as SubBlockConfig & { pattern?: string }).pattern
    return typeof maybePattern === 'string' ? maybePattern : undefined
  }
}

/** Singleton resolver instance */
export const blockSchemaResolver = new BlockSchemaResolver()
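
// Usage sketch for the singleton; the 'agent' block type is chosen
// arbitrarily for illustration. Repeated resolveBlock calls for the same
// type are served from the in-memory cache.
const agentBlock = blockSchemaResolver.resolveBlock('agent')
if (agentBlock) {
  const requiredIds = agentBlock.subBlocks.filter((sb) => sb.required).map((sb) => sb.id)
  console.log(`${agentBlock.name} requires:`, requiredIds)
}

// Enumerate everything visible in the toolbar (hidden blocks excluded by default).
const visibleBlocks = blockSchemaResolver.resolveAllBlocks()
console.log(`${visibleBlocks.length} toolbar blocks resolved`)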
apps/sim/lib/workflows/blocks/schema-types.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
import type { LucideIcon } from 'lucide-react'

/** A fully resolved block schema with all sub-blocks expanded */
export interface ResolvedBlock {
  type: string
  name: string
  description?: string
  category: string
  icon?: LucideIcon
  isTrigger: boolean
  hideFromToolbar: boolean

  /** Resolved sub-blocks with options, conditions, and validation info */
  subBlocks: ResolvedSubBlock[]

  /** Block-level outputs */
  outputs: ResolvedOutput[]

  /** Whether this block supports trigger mode */
  supportsTriggerMode: boolean

  /** Whether this block has advanced mode */
  hasAdvancedMode: boolean

  /** Raw config reference for consumers that need it */
  raw: unknown
}

/** A resolved sub-block with options and metadata */
export interface ResolvedSubBlock {
  id: string
  type: string
  label?: string
  placeholder?: string
  required?: boolean
  password?: boolean

  /** Resolved options (for dropdowns/selectors, etc.) */
  options?: ResolvedOption[]

  /** Whether this sub-block has a condition that controls visibility */
  hasCondition: boolean

  /** Condition details if present */
  condition?: {
    field: string
    value: unknown
    /** Whether condition is currently met (if evaluable statically) */
    met?: boolean
  }

  /** Validation constraints */
  validation?: {
    min?: number
    max?: number
    pattern?: string
  }

  /** Default value */
  defaultValue?: unknown
}

/** A resolved option for dropdowns/selectors */
export interface ResolvedOption {
  label: string
  value: string
  id?: string
}

/** A resolved output definition */
export interface ResolvedOutput {
  name: string
  type: string
  description?: string
}
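
// To make the shape concrete, a hand-written example of a resolved dropdown
// sub-block; all values are invented for illustration.
const exampleSubBlock: ResolvedSubBlock = {
  id: 'model',
  type: 'dropdown',
  label: 'Model',
  required: true,
  hasCondition: false,
  options: [
    { label: 'gpt-4o', value: 'gpt-4o' },
    { label: 'claude-sonnet', value: 'claude-sonnet' },
  ],
  defaultValue: 'gpt-4o',
}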