improvement(copilot): state persistence, subflow recreation, dynamic handle topologies (#3569)

* improvement(copilot): state persistence, subflow recreation, dynamic handle topologies

* address comments
This commit is contained in:
Vikhyath Mondreti
2026-03-13 17:47:02 -07:00
committed by GitHub
parent 92290029f0
commit 7e740e617b
33 changed files with 1375 additions and 312 deletions

View File

@@ -19,6 +19,11 @@ const {
vi.mock('@/lib/auth/hybrid', () => ({
checkHybridAuth: mockCheckHybridAuth,
AuthType: {
SESSION: 'session',
API_KEY: 'api_key',
INTERNAL_JWT: 'internal_jwt',
},
}))
vi.mock('@/lib/workflows/utils', () => ({

View File

@@ -11,6 +11,7 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { validateEdges } from '@/stores/workflows/workflow/edge-validation'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
@@ -180,12 +181,16 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
const typedBlocks = filteredBlocks as Record<string, BlockState>
const validatedEdges = validateEdges(state.edges as WorkflowState['edges'], typedBlocks)
const validationWarnings = validatedEdges.dropped.map(
({ edge, reason }) => `Dropped edge "${edge.id}": ${reason}`
)
const canonicalLoops = generateLoopBlocks(typedBlocks)
const canonicalParallels = generateParallelBlocks(typedBlocks)
const workflowState = {
blocks: filteredBlocks,
edges: state.edges,
edges: validatedEdges.valid,
loops: canonicalLoops,
parallels: canonicalParallels,
lastSaved: state.lastSaved || Date.now(),
@@ -276,7 +281,10 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}
return NextResponse.json({ success: true, warnings }, { status: 200 })
return NextResponse.json(
{ success: true, warnings: [...warnings, ...validationWarnings] },
{ status: 200 }
)
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(

View File

@@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { useParams } from 'next/navigation'
import Editor from 'react-simple-code-editor'
import { useUpdateNodeInternals } from 'reactflow'
import {
Button,
Code,
@@ -173,7 +172,6 @@ export function ConditionInput({
const [visualLineHeights, setVisualLineHeights] = useState<{
[key: string]: number[]
}>({})
const updateNodeInternals = useUpdateNodeInternals()
const batchRemoveEdges = useWorkflowStore((state) => state.batchRemoveEdges)
const edges = useWorkflowStore((state) => state.edges)
@@ -352,17 +350,8 @@ export function ConditionInput({
if (newValue !== prevStoreValueRef.current) {
prevStoreValueRef.current = newValue
setStoreValue(newValue)
updateNodeInternals(blockId)
}
}, [
conditionalBlocks,
blockId,
subBlockId,
setStoreValue,
updateNodeInternals,
isReady,
isPreview,
])
}, [conditionalBlocks, blockId, subBlockId, setStoreValue, isReady, isPreview])
// Cleanup when component unmounts
useEffect(() => {
@@ -708,8 +697,6 @@ export function ConditionInput({
shouldPersistRef.current = true
setConditionalBlocks((blocks) => updateBlockTitles(blocks.filter((block) => block.id !== id)))
setTimeout(() => updateNodeInternals(blockId), 0)
}
const moveBlock = (id: string, direction: 'up' | 'down') => {
@@ -737,8 +724,6 @@ export function ConditionInput({
]
shouldPersistRef.current = true
setConditionalBlocks(updateBlockTitles(newBlocks))
setTimeout(() => updateNodeInternals(blockId), 0)
}
// Add useEffect to handle keyboard events for both dropdowns

View File

@@ -198,14 +198,14 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
</div>
{/*
* Click-catching background — selects this subflow when the body area is clicked.
* No event bubbling concern: ReactFlow renders child nodes as viewport-level siblings,
* not as DOM children of this component, so child clicks never reach this div.
* Subflow body background. Uses pointer-events: none so that edges rendered
* inside the subflow remain clickable. The subflow node wrapper also has
* pointer-events: none (set in workflow.tsx), so body-area clicks pass
* through to the pane. Subflow selection is done via the header above.
*/}
<div
className='absolute inset-0 top-[44px] rounded-b-[8px]'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
onClick={() => setCurrentBlockId(id)}
style={{ pointerEvents: 'none' }}
/>
{!isPreview && (

View File

@@ -11,6 +11,7 @@ import { createMcpToolId } from '@/lib/mcp/shared'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import type { FilterRule, SortRule } from '@/lib/table/types'
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { getConditionRows, getRouterRows } from '@/lib/workflows/dynamic-handle-topology'
import {
buildCanonicalIndex,
evaluateSubBlockCondition,
@@ -1049,6 +1050,9 @@ export const WorkflowBlock = memo(function WorkflowBlock({
const subBlockRows = subBlockRowsData.rows
const subBlockState = subBlockRowsData.stateToUse
const topologySubBlocks = data.isPreview
? (data.blockState?.subBlocks ?? {})
: (currentStoreBlock?.subBlocks ?? {})
const effectiveAdvanced = useMemo(() => {
const rawValues = Object.entries(subBlockState).reduce<Record<string, unknown>>(
(acc, [key, entry]) => {
@@ -1108,34 +1112,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
*/
const conditionRows = useMemo(() => {
if (type !== 'condition') return [] as { id: string; title: string; value: string }[]
const conditionsValue = subBlockState.conditions?.value
const raw = typeof conditionsValue === 'string' ? conditionsValue : undefined
try {
if (raw) {
const parsed = JSON.parse(raw) as unknown
if (Array.isArray(parsed)) {
return parsed.map((item: unknown, index: number) => {
const conditionItem = item as { id?: string; value?: unknown }
const title = index === 0 ? 'if' : index === parsed.length - 1 ? 'else' : 'else if'
return {
id: conditionItem?.id ?? `${id}-cond-${index}`,
title,
value: typeof conditionItem?.value === 'string' ? conditionItem.value : '',
}
})
}
}
} catch (error) {
logger.warn('Failed to parse condition subblock value', { error, blockId: id })
}
return [
{ id: `${id}-if`, title: 'if', value: '' },
{ id: `${id}-else`, title: 'else', value: '' },
]
}, [type, subBlockState, id])
return getConditionRows(id, topologySubBlocks.conditions?.value)
}, [type, topologySubBlocks, id])
/**
* Compute per-route rows (id/value) for router_v2 blocks so we can render
@@ -1144,31 +1122,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({
*/
const routerRows = useMemo(() => {
if (type !== 'router_v2') return [] as { id: string; value: string }[]
const routesValue = subBlockState.routes?.value
const raw = typeof routesValue === 'string' ? routesValue : undefined
try {
if (raw) {
const parsed = JSON.parse(raw) as unknown
if (Array.isArray(parsed)) {
return parsed.map((item: unknown, index: number) => {
const routeItem = item as { id?: string; value?: string }
return {
// Use stable ID format that matches ConditionInput's generateStableId
id: routeItem?.id ?? `${id}-route${index + 1}`,
value: routeItem?.value ?? '',
}
})
}
}
} catch (error) {
logger.warn('Failed to parse router routes value', { error, blockId: id })
}
// Fallback must match ConditionInput's default: generateStableId(blockId, 'route1') = `${blockId}-route1`
return [{ id: `${id}-route1`, value: '' }]
}, [type, subBlockState, id])
return getRouterRows(id, topologySubBlocks.routes?.value)
}, [type, topologySubBlocks, id])
/**
* Compute and publish deterministic layout metrics for workflow blocks.

View File

@@ -6,6 +6,7 @@ export { useBlockOutputFields } from './use-block-output-fields'
export { useBlockVisual } from './use-block-visual'
export { useCanvasContextMenu } from './use-canvas-context-menu'
export { type CurrentWorkflow, useCurrentWorkflow } from './use-current-workflow'
export { useDynamicHandleRefresh } from './use-dynamic-handle-refresh'
export { useNodeUtilities } from './use-node-utilities'
export { usePreventZoom } from './use-prevent-zoom'
export { useScrollManagement } from './use-scroll-management'

View File

@@ -0,0 +1,33 @@
import { useEffect, useMemo, useRef } from 'react'
import { useUpdateNodeInternals } from 'reactflow'
import {
collectDynamicHandleTopologySignatures,
getChangedDynamicHandleBlockIds,
} from '@/lib/workflows/dynamic-handle-topology'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
 * Keeps React Flow's cached handle positions in sync with blocks whose handle
 * topology changes at runtime (e.g. condition/router branches being added or
 * removed). Computes a per-block topology signature from the store and calls
 * updateNodeInternals only for blocks whose signature actually changed.
 */
export function useDynamicHandleRefresh() {
  const updateNodeInternals = useUpdateNodeInternals()
  const blocks = useWorkflowStore((state) => state.blocks)
  // Signatures from the previous render, used to diff against the current set.
  const prevSignaturesRef = useRef<Map<string, string>>(new Map())

  const signatures = useMemo(() => collectDynamicHandleTopologySignatures(blocks), [blocks])

  useEffect(() => {
    const changedIds = getChangedDynamicHandleBlockIds(prevSignaturesRef.current, signatures)
    // Record the current snapshot before bailing so the next diff is correct.
    prevSignaturesRef.current = signatures
    if (!changedIds.length) return

    // Defer to the next frame so React Flow measures handles after the DOM
    // for the changed blocks has been committed.
    const frame = requestAnimationFrame(() => {
      for (const blockId of changedIds) {
        updateNodeInternals(blockId)
      }
    })
    return () => cancelAnimationFrame(frame)
  }, [signatures, updateNodeInternals])
}

View File

@@ -116,7 +116,7 @@ export async function applyAutoLayoutAndUpdateStore(
lastSaved: Date.now(),
}
useWorkflowStore.setState(newWorkflowState)
useWorkflowStore.getState().replaceWorkflowState(newWorkflowState)
logger.info('Successfully updated workflow store with auto layout', { workflowId })
@@ -168,9 +168,9 @@ export async function applyAutoLayoutAndUpdateStore(
})
// Revert the store changes since database save failed
useWorkflowStore.setState({
useWorkflowStore.getState().replaceWorkflowState({
...workflowStore.getWorkflowState(),
blocks: blocks,
blocks,
lastSaved: workflowStore.lastSaved,
})

View File

@@ -1,6 +1,7 @@
export * from './auto-layout-utils'
export * from './block-protection-utils'
export * from './block-ring-utils'
export * from './node-derivation'
export * from './node-position-utils'
export * from './workflow-canvas-helpers'
export * from './workflow-execution-utils'

View File

@@ -0,0 +1,32 @@
import type { BlockState } from '@/stores/workflows/workflow/types'
/** Baseline z-index values for non-container workflow nodes. */
export const Z_INDEX = {
  ROOT_BLOCK: 10,
  CHILD_BLOCK: 1000,
} as const

/**
 * Returns the z-index for a container (loop/parallel) node: its nesting depth,
 * so nested containers always render above their ancestors.
 * The depth cap guards against malformed parentId cycles.
 */
export function computeContainerZIndex(
  block: Pick<BlockState, 'data'>,
  allBlocks: Record<string, Pick<BlockState, 'data'>>
): number {
  const MAX_DEPTH = 100
  let depth = 0
  for (
    let ancestorId = block.data?.parentId;
    ancestorId && depth < MAX_DEPTH;
    ancestorId = allBlocks[ancestorId]?.data?.parentId
  ) {
    depth += 1
  }
  return depth
}

/**
 * Returns the z-index for any workflow node. Containers use their nesting
 * depth (0, 1, 2, ...); regular blocks inside a container use CHILD_BLOCK so
 * they render above every container, and top-level blocks use ROOT_BLOCK.
 */
export function computeBlockZIndex(
  block: Pick<BlockState, 'type' | 'data'>,
  allBlocks: Record<string, Pick<BlockState, 'type' | 'data'>>
): number {
  const isContainer = block.type === 'loop' || block.type === 'parallel'
  if (isContainer) {
    return computeContainerZIndex(block, allBlocks)
  }
  return block.data?.parentId ? Z_INDEX.CHILD_BLOCK : Z_INDEX.ROOT_BLOCK
}

View File

@@ -45,6 +45,7 @@ import {
useAutoLayout,
useCanvasContextMenu,
useCurrentWorkflow,
useDynamicHandleRefresh,
useNodeUtilities,
useShiftSelectionLock,
useWorkflowExecution,
@@ -53,6 +54,7 @@ import {
calculateContainerDimensions,
clampPositionToContainer,
clearDragHighlights,
computeBlockZIndex,
computeClampedPositionUpdates,
estimateBlockDimensions,
filterProtectedBlocks,
@@ -64,6 +66,7 @@ import {
isInEditableElement,
resolveParentChildSelectionConflicts,
validateTriggerPaste,
Z_INDEX,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks'
@@ -248,6 +251,7 @@ const WorkflowContent = React.memo(() => {
const { screenToFlowPosition, getNodes, setNodes, getIntersectingNodes } = reactFlowInstance
const { fitViewToBounds, getViewportCenter } = useCanvasViewport(reactFlowInstance)
const { emitCursorUpdate } = useSocket()
useDynamicHandleRefresh()
const workspaceId = params.workspaceId as string
const workflowIdParam = params.workflowId as string
@@ -727,6 +731,7 @@ const WorkflowContent = React.memo(() => {
...node,
position: update.newPosition,
parentId: update.newParentId ?? undefined,
zIndex: update.newParentId ? Z_INDEX.CHILD_BLOCK : Z_INDEX.ROOT_BLOCK,
}
}
return node
@@ -2364,13 +2369,6 @@ const WorkflowContent = React.memo(() => {
// Handle container nodes differently
if (block.type === 'loop' || block.type === 'parallel') {
// Compute nesting depth so children always render above parents
let depth = 0
let pid = block.data?.parentId as string | undefined
while (pid && depth < 100) {
depth++
pid = blocks[pid]?.data?.parentId as string | undefined
}
nodeArray.push({
id: block.id,
type: 'subflowNode',
@@ -2379,8 +2377,9 @@ const WorkflowContent = React.memo(() => {
extent: block.data?.extent || undefined,
dragHandle: '.workflow-drag-handle',
draggable: !isBlockProtected(block.id, blocks),
zIndex: depth,
zIndex: computeBlockZIndex(block, blocks),
className: block.data?.parentId ? 'nested-subflow-node' : undefined,
style: { pointerEvents: 'none' },
data: {
...block.data,
name: block.name,
@@ -2409,12 +2408,6 @@ const WorkflowContent = React.memo(() => {
const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock'
const dragHandle = block.type === 'note' ? '.note-drag-handle' : '.workflow-drag-handle'
// Compute zIndex for blocks inside containers so they render above the
// parent subflow's interactive body area (which needs pointer-events for
// click-to-select). Container nodes use zIndex: depth (0, 1, 2...),
// so child blocks use a baseline that is always above any container.
const childZIndex = block.data?.parentId ? 1000 : undefined
// Create stable node object - React Flow will handle shallow comparison
nodeArray.push({
id: block.id,
@@ -2423,7 +2416,7 @@ const WorkflowContent = React.memo(() => {
parentId: block.data?.parentId,
dragHandle,
draggable: !isBlockProtected(block.id, blocks),
...(childZIndex !== undefined && { zIndex: childZIndex }),
zIndex: computeBlockZIndex(block, blocks),
extent: (() => {
// Clamp children to subflow body (exclude header)
const parentId = block.data?.parentId as string | undefined
@@ -2609,6 +2602,7 @@ const WorkflowContent = React.memo(() => {
position: absPos,
parentId: undefined,
extent: undefined,
zIndex: Z_INDEX.ROOT_BLOCK,
}
}
return n
@@ -3330,6 +3324,7 @@ const WorkflowContent = React.memo(() => {
position: relativePositionBefore,
parentId: potentialParentId,
extent: 'parent' as const,
zIndex: Z_INDEX.CHILD_BLOCK,
}
}
return n
@@ -3372,6 +3367,7 @@ const WorkflowContent = React.memo(() => {
position: absolutePosition,
parentId: undefined,
extent: undefined,
zIndex: Z_INDEX.ROOT_BLOCK,
}
}
return n
@@ -3594,12 +3590,43 @@ const WorkflowContent = React.memo(() => {
const handleNodeClick = useCallback(
(event: React.MouseEvent, node: Node) => {
const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey
setNodes((nodes) =>
nodes.map((n) => ({
setNodes((nodes) => {
const updated = nodes.map((n) => ({
...n,
selected: isMultiSelect ? (n.id === node.id ? true : n.selected) : n.id === node.id,
}))
)
if (!isMultiSelect) return updated
const clickedId = node.id
const clickedParentId = node.parentId
const selectedIds = new Set(updated.filter((n) => n.selected).map((n) => n.id))
let hasConflict = false
const resolved = updated.map((n) => {
if (!n.selected || n.id === clickedId) return n
const nParentId = n.parentId
if (nParentId === clickedId) {
hasConflict = true
return { ...n, selected: false }
}
if (clickedParentId === n.id) {
hasConflict = true
return { ...n, selected: false }
}
if (nParentId && selectedIds.has(nParentId)) {
hasConflict = true
return { ...n, selected: false }
}
return n
})
return hasConflict ? resolved : updated
})
},
[setNodes]
)

View File

@@ -475,6 +475,7 @@ export function useCollaborativeWorkflow() {
try {
useSubBlockStore.getState().setValue(blockId, subblockId, value)
useWorkflowStore.getState().syncDynamicHandleSubblockValue(blockId, subblockId, value)
const blockType = useWorkflowStore.getState().blocks?.[blockId]?.type
if (activeWorkflowId && blockType === 'function' && subblockId === 'code') {
useCodeUndoRedoStore.getState().clear(activeWorkflowId, blockId, subblockId)
@@ -555,7 +556,7 @@ export function useCollaborativeWorkflow() {
isApplyingRemoteChange.current = true
try {
// Update the main workflow state using the API response
useWorkflowStore.setState({
useWorkflowStore.getState().replaceWorkflowState({
blocks: workflowData.state.blocks || {},
edges: workflowData.state.edges || [],
loops: workflowData.state.loops || {},
@@ -1230,6 +1231,7 @@ export function useCollaborativeWorkflow() {
// ALWAYS update local store first for immediate UI feedback
useSubBlockStore.getState().setValue(blockId, subblockId, value)
useWorkflowStore.getState().syncDynamicHandleSubblockValue(blockId, subblockId, value)
if (activeWorkflowId) {
const operationId = crypto.randomUUID()

View File

@@ -13,9 +13,17 @@ const agentBlockConfig = {
subBlocks: [{ id: 'responseFormat', type: 'response-format' }],
}
const conditionBlockConfig = {
type: 'condition',
name: 'Condition',
outputs: {},
subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
vi.mock('@/blocks/registry', () => ({
getAllBlocks: () => [agentBlockConfig],
getBlock: (type: string) => (type === 'agent' ? agentBlockConfig : undefined),
getAllBlocks: () => [agentBlockConfig, conditionBlockConfig],
getBlock: (type: string) =>
type === 'agent' ? agentBlockConfig : type === 'condition' ? conditionBlockConfig : undefined,
}))
describe('createBlockFromParams', () => {
@@ -41,4 +49,24 @@ describe('createBlockFromParams', () => {
expect(block.outputs.answer).toBeDefined()
expect(block.outputs.answer.type).toBe('string')
})
it('preserves configured subblock types and normalizes condition branch ids', () => {
const block = createBlockFromParams('condition-1', {
type: 'condition',
name: 'Condition 1',
inputs: {
conditions: JSON.stringify([
{ id: 'arbitrary-if', title: 'if', value: 'true' },
{ id: 'arbitrary-else', title: 'else', value: '' },
]),
},
triggerMode: false,
})
expect(block.subBlocks.conditions.type).toBe('condition-input')
const parsed = JSON.parse(block.subBlocks.conditions.value)
expect(parsed[0].id).toBe('condition-1-if')
expect(parsed[1].id).toBe('condition-1-else')
})
})

View File

@@ -99,6 +99,8 @@ export function createBlockFromParams(
sanitizedValue = normalizeArrayWithIds(value)
}
sanitizedValue = normalizeConditionRouterIds(blockId, key, sanitizedValue)
// Special handling for tools - normalize and filter disallowed
if (key === 'tools' && Array.isArray(value)) {
sanitizedValue = filterDisallowedTools(
@@ -114,9 +116,10 @@ export function createBlockFromParams(
sanitizedValue = normalizeResponseFormat(value)
}
const subBlockDef = blockConfig?.subBlocks.find((subBlock) => subBlock.id === key)
blockState.subBlocks[key] = {
id: key,
type: 'short-input',
type: subBlockDef?.type || 'short-input',
value: sanitizedValue,
}
})
@@ -272,6 +275,52 @@ export function shouldNormalizeArrayIds(key: string): boolean {
return ARRAY_WITH_ID_SUBBLOCK_TYPES.has(key)
}
/**
 * Normalizes condition/router branch IDs to the canonical block-scoped format.
 * The LLM supplies the branch structure (if/else-if/else, or routes) but is
 * not expected to produce the internal IDs — those are derived here from the
 * owning block's ID. Non-matching keys and unparseable values pass through
 * untouched; string input yields string output, array input yields an array.
 */
export function normalizeConditionRouterIds(blockId: string, key: string, value: unknown): unknown {
  if (key !== 'conditions' && key !== 'routes') return value

  const wasString = typeof value === 'string'
  let items: any[]
  if (wasString) {
    try {
      const decoded = JSON.parse(value as string)
      if (!Array.isArray(decoded)) return value
      items = decoded
    } catch {
      // Leave invalid JSON alone; downstream validation reports it.
      return value
    }
  } else if (Array.isArray(value)) {
    items = value
  } else {
    return value
  }

  let elseIfIndex = 0
  const withCanonicalIds = items.map((item, position) => {
    if (!item || typeof item !== 'object') return item

    let canonicalId: string
    if (key === 'routes') {
      canonicalId = `${blockId}-route${position + 1}`
    } else if (position === 0) {
      canonicalId = `${blockId}-if`
    } else if (position === items.length - 1) {
      canonicalId = `${blockId}-else`
    } else {
      canonicalId = `${blockId}-else-if-${elseIfIndex++}`
    }
    return { ...item, id: canonicalId }
  })

  return wasString ? JSON.stringify(withCanonicalIds) : withCanonicalIds
}
/**
* Normalize responseFormat to ensure consistent storage
* Handles both string (JSON) and object formats

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { validateEdges } from '@/stores/workflows/workflow/edge-validation'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { addConnectionsAsEdges, normalizeBlockIdsInOperations } from './builders'
import {
@@ -213,8 +214,8 @@ export function applyOperationsToWorkflowState(
handler(operation, ctx)
}
// Pass 2: Add all deferred connections from add/insert operations
// Now all blocks exist, so connections can be safely created
// Pass 2: Create all edges from deferred connections
// All blocks exist at this point, so forward references resolve correctly
if (ctx.deferredConnections.length > 0) {
logger.info('Processing deferred connections from add/insert operations', {
deferredConnectionCount: ctx.deferredConnections.length,
@@ -238,6 +239,12 @@ export function applyOperationsToWorkflowState(
totalEdges: (modifiedState as any).edges?.length,
})
}
// Remove edges that cross scope boundaries. This runs after all operations
// and deferred connections are applied so that every block has its final
// parentId. Running it per-operation would incorrectly drop edges between
// blocks that are both being moved into the same subflow in one batch.
removeInvalidScopeEdges(modifiedState, skippedItems)
// Regenerate loops and parallels after modifications
;(modifiedState as any).loops = generateLoopBlocks((modifiedState as any).blocks)
@@ -272,3 +279,42 @@ export function applyOperationsToWorkflowState(
return { state: modifiedState, validationErrors, skippedItems }
}
/**
 * Drops edges that became invalid once every operation has been applied.
 * An edge is dropped when:
 * - either endpoint no longer exists (dangling reference),
 * - its source and target live in incompatible scopes, or
 * - a child block connects to its own parent container via a non-handle edge.
 *
 * Valid scope relationships:
 * - Same scope: both blocks share the same parentId.
 * - Container→child: source is the target's parent container (start handles).
 * - Child→container: target is the source's parent container (end handles).
 *
 * Each dropped edge is recorded via logSkippedItem so the caller can surface
 * it; the surviving edges replace modifiedState.edges in place.
 */
function removeInvalidScopeEdges(modifiedState: any, skippedItems: SkippedItem[]): void {
  const result = validateEdges(modifiedState.edges || [], modifiedState.blocks || {})
  modifiedState.edges = result.valid

  if (result.dropped.length === 0) return

  for (const { edge, reason } of result.dropped) {
    logSkippedItem(skippedItems, {
      type: 'invalid_edge_scope',
      operationType: 'add_edge',
      blockId: edge.source,
      reason: `Edge from "${edge.source}" to "${edge.target}" skipped - ${reason}`,
      details: {
        edgeId: edge.id,
        sourceHandle: edge.sourceHandle,
        targetHandle: edge.targetHandle,
        targetId: edge.target,
      },
    })
  }

  logger.info('Removed invalid workflow edges', {
    removed: result.dropped.length,
    reasons: result.dropped.map(({ reason }) => reason),
  })
}

View File

@@ -3,7 +3,11 @@ import { workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { applyAutoLayout } from '@/lib/workflows/autolayout'
import { applyTargetedLayout } from '@/lib/workflows/autolayout'
import {
DEFAULT_HORIZONTAL_SPACING,
DEFAULT_VERTICAL_SPACING,
} from '@/lib/workflows/autolayout/constants'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import {
loadWorkflowFromNormalizedTables,
@@ -13,6 +17,7 @@ import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation'
import { applyOperationsToWorkflowState } from './engine'
import type { EditWorkflowParams, ValidationError } from './types'
import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation'
@@ -30,42 +35,29 @@ async function getCurrentWorkflowStateFromDb(
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
if (!normalized) throw new Error('Workflow has no normalized data')
// Validate and fix blocks without types
const blocks = { ...normalized.blocks }
const invalidBlocks: string[] = []
Object.entries(blocks).forEach(([id, block]: [string, any]) => {
if (!block.type) {
logger.warn(`Block ${id} loaded without type from database`, {
blockKeys: Object.keys(block),
blockName: block.name,
})
invalidBlocks.push(id)
}
})
// Remove invalid blocks
invalidBlocks.forEach((id) => delete blocks[id])
// Remove edges connected to invalid blocks
const edges = normalized.edges.filter(
(edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
)
const workflowState: any = {
blocks,
edges,
const { state: validatedState, warnings } = normalizeWorkflowState({
blocks: normalized.blocks,
edges: normalized.edges,
loops: normalized.loops || {},
parallels: normalized.parallels || {},
})
if (warnings.length > 0) {
logger.warn('Normalized workflow state loaded from DB for copilot', {
workflowId,
warningCount: warnings.length,
warnings,
})
}
const subBlockValues: Record<string, Record<string, any>> = {}
Object.entries(normalized.blocks).forEach(([blockId, block]) => {
Object.entries(validatedState.blocks).forEach(([blockId, block]) => {
subBlockValues[blockId] = {}
Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => {
if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value
})
})
return { workflowState, subBlockValues }
return { workflowState: validatedState, subBlockValues }
}
export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
@@ -137,17 +129,18 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
// Add credential validation errors
validationErrors.push(...credentialErrors)
// Get workspaceId for selector validation
let workspaceId: string | undefined
let workflowName: string | undefined
try {
const [workflowRecord] = await db
.select({ workspaceId: workflowTable.workspaceId })
.select({ workspaceId: workflowTable.workspaceId, name: workflowTable.name })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
workspaceId = workflowRecord?.workspaceId ?? undefined
workflowName = workflowRecord?.name ?? undefined
} catch (error) {
logger.warn('Failed to get workspaceId for selector validation', { error, workflowId })
logger.warn('Failed to get workflow metadata for validation', { error, workflowId })
}
// Validate selector IDs exist in the database
@@ -233,21 +226,38 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
// Persist the workflow state to the database
const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
// Apply autolayout to position blocks properly
const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
horizontalSpacing: 250,
verticalSpacing: 100,
padding: { x: 100, y: 100 },
// Identify blocks that need layout by comparing against the pre-operation
// state. New blocks and blocks inserted into subflows (position reset to
// 0,0) need repositioning. Extracted blocks are excluded — their handler
// already computed valid absolute positions from the container offset.
const preOperationBlockIds = new Set(Object.keys(workflowState.blocks || {}))
const blocksNeedingLayout = Object.keys(finalWorkflowState.blocks).filter((id) => {
if (!preOperationBlockIds.has(id)) return true
const prevParent = workflowState.blocks[id]?.data?.parentId ?? null
const currParent = finalWorkflowState.blocks[id]?.data?.parentId ?? null
if (prevParent === currParent) return false
// Parent changed — only needs layout if position was reset to (0,0)
// by insert_into_subflow. extract_from_subflow computes absolute
// positions directly, so those blocks don't need repositioning.
const pos = finalWorkflowState.blocks[id]?.position
return pos?.x === 0 && pos?.y === 0
})
const layoutedBlocks =
layoutResult.success && layoutResult.blocks ? layoutResult.blocks : finalWorkflowState.blocks
let layoutedBlocks = finalWorkflowState.blocks
if (!layoutResult.success) {
logger.warn('Autolayout failed, using default positions', {
workflowId,
error: layoutResult.error,
})
if (blocksNeedingLayout.length > 0) {
try {
layoutedBlocks = applyTargetedLayout(finalWorkflowState.blocks, finalWorkflowState.edges, {
changedBlockIds: blocksNeedingLayout,
horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
verticalSpacing: DEFAULT_VERTICAL_SPACING,
})
} catch (error) {
logger.warn('Targeted autolayout failed, using default positions', {
workflowId,
error: error instanceof Error ? error.message : String(error),
})
}
}
const workflowStateForDb = {
@@ -279,20 +289,25 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown>
logger.info('Workflow state persisted to database', { workflowId })
// Return the modified workflow state with autolayout applied
const sanitizationWarnings = validation.warnings.length > 0 ? validation.warnings : undefined
return {
success: true,
workflowId,
workflowName: workflowName ?? 'Workflow',
workflowState: { ...finalWorkflowState, blocks: layoutedBlocks },
// Include input validation errors so the LLM can see what was rejected
...(inputErrors && {
inputValidationErrors: inputErrors,
inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`,
}),
// Include skipped items so the LLM can see what operations were skipped
...(skippedMessages && {
skippedItems: skippedMessages,
skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. Details: ${skippedMessages.join('; ')}`,
}),
...(sanitizationWarnings && {
sanitizationWarnings,
sanitizationMessage: `${sanitizationWarnings.length} field(s) were automatically sanitized: ${sanitizationWarnings.join('; ')}`,
}),
}
},
}

View File

@@ -0,0 +1,264 @@
/**
* @vitest-environment node
*/
import { describe, expect, it, vi } from 'vitest'
import { applyOperationsToWorkflowState } from './engine'
// Stub the shared logger so engine logging is silenced and inspectable.
vi.mock('@sim/logger', () => {
  const makeLogger = () => ({
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  })
  return { createLogger: makeLogger }
})
// Mock the block registry with a single source of truth for the block
// configs, so getAllBlocks and getBlock can never drift out of sync (the
// previous version duplicated each config in both functions).
vi.mock('@/blocks/registry', () => {
  const registry: Record<string, any> = {
    condition: {
      type: 'condition',
      name: 'Condition',
      subBlocks: [{ id: 'conditions', type: 'condition-input' }],
    },
    agent: {
      type: 'agent',
      name: 'Agent',
      subBlocks: [
        { id: 'systemPrompt', type: 'long-input' },
        { id: 'model', type: 'combobox' },
      ],
    },
    function: {
      type: 'function',
      name: 'Function',
      subBlocks: [
        { id: 'code', type: 'code' },
        { id: 'language', type: 'dropdown' },
      ],
    },
  }
  return {
    // Object.values preserves insertion order: condition, agent, function.
    getAllBlocks: () => Object.values(registry),
    getBlock: (type: string) => registry[type] || undefined,
  }
})
/**
 * Builds a fixture workflow: a 'loop' container ('loop-1') holding a condition
 * block ('condition-1') whose "if" branch is wired to an agent block
 * ('agent-1'). Both children carry parentId/extent pointing at the container.
 * The loops/parallels maps start empty.
 */
function makeLoopWorkflow() {
  // Condition rows serialized exactly as the store persists them.
  const conditionRows = [
    { id: 'condition-1-if', title: 'if', value: 'true' },
    { id: 'condition-1-else', title: 'else', value: '' },
  ]
  const blocks = {
    'loop-1': {
      id: 'loop-1',
      type: 'loop',
      name: 'Loop 1',
      position: { x: 0, y: 0 },
      enabled: true,
      subBlocks: {},
      outputs: {},
      data: { loopType: 'for', count: 5 },
    },
    'condition-1': {
      id: 'condition-1',
      type: 'condition',
      name: 'Condition 1',
      position: { x: 100, y: 100 },
      enabled: true,
      subBlocks: {
        conditions: {
          id: 'conditions',
          type: 'condition-input',
          value: JSON.stringify(conditionRows),
        },
      },
      outputs: {},
      data: { parentId: 'loop-1', extent: 'parent' },
    },
    'agent-1': {
      id: 'agent-1',
      type: 'agent',
      name: 'Agent 1',
      position: { x: 300, y: 100 },
      enabled: true,
      subBlocks: {
        systemPrompt: { id: 'systemPrompt', type: 'long-input', value: 'You are helpful' },
        model: { id: 'model', type: 'combobox', value: 'gpt-4o' },
      },
      outputs: {},
      data: { parentId: 'loop-1', extent: 'parent' },
    },
  }
  // Edge 1: container start -> condition; edge 2: condition "if" -> agent.
  const edges = [
    {
      id: 'edge-1',
      source: 'loop-1',
      sourceHandle: 'loop-start-source',
      target: 'condition-1',
      targetHandle: 'target',
      type: 'default',
    },
    {
      id: 'edge-2',
      source: 'condition-1',
      sourceHandle: 'condition-condition-1-if',
      target: 'agent-1',
      targetHandle: 'target',
      type: 'default',
    },
  ]
  return { blocks, edges, loops: {}, parallels: {} }
}
// Exercises the merge strategy for nestedNodes edits on a loop container:
// incoming children are matched to existing children by (normalized) name,
// matched children keep their IDs and edges, unmatched existing children are
// removed, and genuinely new children are created under the container.
describe('handleEditOperation nestedNodes merge', () => {
  it('preserves existing child block IDs when editing a loop with nestedNodes', () => {
    const workflow = makeLoopWorkflow()
    const { state } = applyOperationsToWorkflowState(workflow, [
      {
        operation_type: 'edit',
        block_id: 'loop-1',
        params: {
          nestedNodes: {
            'new-condition': {
              type: 'condition',
              name: 'Condition 1',
              inputs: {
                conditions: [
                  { id: 'x', title: 'if', value: 'x > 1' },
                  { id: 'y', title: 'else', value: '' },
                ],
              },
            },
            'new-agent': {
              type: 'agent',
              name: 'Agent 1',
              inputs: { systemPrompt: 'Updated prompt' },
            },
          },
        },
      },
    ])
    // Names match existing children, so original IDs survive and the
    // proposed new IDs are never materialized.
    expect(state.blocks['condition-1']).toBeDefined()
    expect(state.blocks['agent-1']).toBeDefined()
    expect(state.blocks['new-condition']).toBeUndefined()
    expect(state.blocks['new-agent']).toBeUndefined()
  })
  it('preserves edges for matched children when connections are not provided', () => {
    const workflow = makeLoopWorkflow()
    const { state } = applyOperationsToWorkflowState(workflow, [
      {
        operation_type: 'edit',
        block_id: 'loop-1',
        params: {
          nestedNodes: {
            x: { type: 'condition', name: 'Condition 1' },
            y: { type: 'agent', name: 'Agent 1' },
          },
        },
      },
    ])
    // The fixture's condition -> agent edge must survive the merge.
    const conditionEdge = state.edges.find((e: any) => e.source === 'condition-1')
    expect(conditionEdge).toBeDefined()
  })
  it('removes children not present in incoming nestedNodes', () => {
    const workflow = makeLoopWorkflow()
    const { state } = applyOperationsToWorkflowState(workflow, [
      {
        operation_type: 'edit',
        block_id: 'loop-1',
        params: {
          nestedNodes: {
            x: { type: 'condition', name: 'Condition 1' },
          },
        },
      },
    ])
    expect(state.blocks['condition-1']).toBeDefined()
    expect(state.blocks['agent-1']).toBeUndefined()
    // Edges touching the removed child must be cleaned up too.
    const agentEdges = state.edges.filter(
      (e: any) => e.source === 'agent-1' || e.target === 'agent-1'
    )
    expect(agentEdges).toHaveLength(0)
  })
  it('creates new children that do not match existing ones', () => {
    const workflow = makeLoopWorkflow()
    const { state } = applyOperationsToWorkflowState(workflow, [
      {
        operation_type: 'edit',
        block_id: 'loop-1',
        params: {
          nestedNodes: {
            x: { type: 'condition', name: 'Condition 1' },
            y: { type: 'agent', name: 'Agent 1' },
            'new-func': { type: 'function', name: 'Function 1', inputs: { code: 'return 1' } },
          },
        },
      },
    ])
    expect(state.blocks['condition-1']).toBeDefined()
    expect(state.blocks['agent-1']).toBeDefined()
    // The unmatched node is created and parented to the container.
    const funcBlock = Object.values(state.blocks).find((b: any) => b.name === 'Function 1')
    expect(funcBlock).toBeDefined()
    expect((funcBlock as any).data?.parentId).toBe('loop-1')
  })
  it('updates inputs on matched children without changing their ID', () => {
    const workflow = makeLoopWorkflow()
    const { state } = applyOperationsToWorkflowState(workflow, [
      {
        operation_type: 'edit',
        block_id: 'loop-1',
        params: {
          nestedNodes: {
            x: {
              type: 'agent',
              name: 'Agent 1',
              inputs: { systemPrompt: 'New prompt' },
            },
            y: { type: 'condition', name: 'Condition 1' },
          },
        },
      },
    ])
    const agent = state.blocks['agent-1']
    expect(agent).toBeDefined()
    expect(agent.subBlocks.systemPrompt.value).toBe('New prompt')
  })
})

View File

@@ -5,12 +5,11 @@ import { getBlock } from '@/blocks/registry'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import {
addConnectionsAsEdges,
applyTriggerConfigToBlockSubblocks,
createBlockFromParams,
createValidatedEdge,
filterDisallowedTools,
normalizeArrayWithIds,
normalizeConditionRouterIds,
normalizeResponseFormat,
normalizeTools,
shouldNormalizeArrayIds,
@@ -78,7 +77,8 @@ export function handleDeleteOperation(op: EditWorkflowOperation, ctx: OperationC
}
export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
const { modifiedState, skippedItems, validationErrors, permissionConfig } = ctx
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } =
ctx
const { block_id, params } = op
if (!modifiedState.blocks[block_id]) {
@@ -148,6 +148,8 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
sanitizedValue = normalizeArrayWithIds(value)
}
sanitizedValue = normalizeConditionRouterIds(block_id, key, sanitizedValue)
// Special handling for tools - normalize and filter disallowed
if (key === 'tools' && Array.isArray(value)) {
sanitizedValue = filterDisallowedTools(
@@ -164,9 +166,10 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
}
if (!block.subBlocks[key]) {
const subBlockDef = getBlock(block.type)?.subBlocks.find((sb) => sb.id === key)
block.subBlocks[key] = {
id: key,
type: 'short-input',
type: subBlockDef?.type || 'short-input',
value: sanitizedValue,
}
} else {
@@ -335,38 +338,23 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
block.advancedMode = params.advancedMode
}
// Handle nested nodes update (for loops/parallels)
// Handle nested nodes update (for loops/parallels) using merge strategy.
// Existing children that match an incoming node by name are updated in place
// (preserving their block ID). New children are created. Children not present
// in the incoming set are removed.
if (params?.nestedNodes) {
// Remove all existing child blocks
const existingChildren = Object.keys(modifiedState.blocks).filter(
(id) => modifiedState.blocks[id].data?.parentId === block_id
)
existingChildren.forEach((childId) => delete modifiedState.blocks[childId])
// Remove edges to/from removed children
modifiedState.edges = modifiedState.edges.filter(
(edge: any) =>
!existingChildren.includes(edge.source) && !existingChildren.includes(edge.target)
const existingChildren: Array<[string, any]> = Object.entries(modifiedState.blocks).filter(
([, b]: [string, any]) => b.data?.parentId === block_id
)
// Add new nested blocks
const existingByName = new Map<string, [string, any]>()
for (const [id, child] of existingChildren) {
existingByName.set(normalizeName(child.name), [id, child])
}
const matchedExistingIds = new Set<string>()
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
// Validate childId is a valid string
if (!isValidKey(childId)) {
logSkippedItem(skippedItems, {
type: 'missing_required_params',
operationType: 'add_nested_node',
blockId: String(childId || 'invalid'),
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
})
logger.error('Invalid childId detected in nestedNodes', {
parentBlockId: block_id,
childId,
childId_type: typeof childId,
})
return
}
if (childBlock.type === 'loop' || childBlock.type === 'parallel') {
logSkippedItem(skippedItems, {
type: 'nested_subflow_not_allowed',
@@ -378,22 +366,108 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
return
}
const childBlockState = createBlockFromParams(
childId,
childBlock,
block_id,
validationErrors,
permissionConfig,
skippedItems
)
modifiedState.blocks[childId] = childBlockState
const incomingName = normalizeName(childBlock.name || '')
const existingMatch = incomingName ? existingByName.get(incomingName) : undefined
// Add connections for child block
if (childBlock.connections) {
addConnectionsAsEdges(modifiedState, childId, childBlock.connections, logger, skippedItems)
if (existingMatch) {
const [existingId, existingBlock] = existingMatch
matchedExistingIds.add(existingId)
if (childBlock.inputs) {
if (!existingBlock.subBlocks) existingBlock.subBlocks = {}
const childValidation = validateInputsForBlock(
existingBlock.type,
childBlock.inputs,
existingId
)
validationErrors.push(...childValidation.errors)
Object.entries(childValidation.validInputs).forEach(([key, value]) => {
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) return
let sanitizedValue = value
if (shouldNormalizeArrayIds(key)) {
sanitizedValue = normalizeArrayWithIds(value)
}
sanitizedValue = normalizeConditionRouterIds(existingId, key, sanitizedValue)
if (key === 'tools' && Array.isArray(value)) {
sanitizedValue = filterDisallowedTools(
normalizeTools(value),
permissionConfig,
existingId,
skippedItems
)
}
if (key === 'responseFormat' && value) {
sanitizedValue = normalizeResponseFormat(value)
}
const subBlockDef = getBlock(existingBlock.type)?.subBlocks.find(
(sb: any) => sb.id === key
)
if (!existingBlock.subBlocks[key]) {
existingBlock.subBlocks[key] = {
id: key,
type: subBlockDef?.type || 'short-input',
value: sanitizedValue,
}
} else {
existingBlock.subBlocks[key].value = sanitizedValue
}
})
}
if (childBlock.connections) {
modifiedState.edges = modifiedState.edges.filter(
(edge: any) => edge.source !== existingId
)
deferredConnections.push({
blockId: existingId,
connections: childBlock.connections,
})
}
} else {
if (!isValidKey(childId)) {
logSkippedItem(skippedItems, {
type: 'missing_required_params',
operationType: 'add_nested_node',
blockId: String(childId || 'invalid'),
reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
})
return
}
const childBlockState = createBlockFromParams(
childId,
childBlock,
block_id,
validationErrors,
permissionConfig,
skippedItems
)
modifiedState.blocks[childId] = childBlockState
if (childBlock.connections) {
deferredConnections.push({
blockId: childId,
connections: childBlock.connections,
})
}
}
})
const removedIds = new Set<string>()
for (const [existingId] of existingChildren) {
if (!matchedExistingIds.has(existingId)) {
delete modifiedState.blocks[existingId]
removedIds.add(existingId)
}
}
if (removedIds.size > 0) {
modifiedState.edges = modifiedState.edges.filter(
(edge: any) => !removedIds.has(edge.source) && !removedIds.has(edge.target)
)
}
// Update loop/parallel configuration based on type (strict validation)
if (block.type === 'loop') {
block.data = block.data || {}
@@ -446,47 +520,13 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
}
}
// Handle connections update (convert to edges)
// Defer connections to pass 2 so all blocks exist before edges are created
if (params?.connections) {
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
Object.entries(params.connections).forEach(([connectionType, targets]) => {
if (targets === null) return
const mapConnectionTypeToHandle = (type: string): string => {
if (type === 'success') return 'source'
if (type === 'error') return 'error'
return type
}
const sourceHandle = mapConnectionTypeToHandle(connectionType)
const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
createValidatedEdge(
modifiedState,
block_id,
targetBlock,
sourceHandle,
targetHandle || 'target',
'edit',
logger,
skippedItems
)
}
if (typeof targets === 'string') {
addEdgeForTarget(targets)
} else if (Array.isArray(targets)) {
targets.forEach((target: any) => {
if (typeof target === 'string') {
addEdgeForTarget(target)
} else if (target?.block) {
addEdgeForTarget(target.block, target.handle)
}
})
} else if (typeof targets === 'object' && (targets as any)?.block) {
addEdgeForTarget((targets as any).block, (targets as any).handle)
}
deferredConnections.push({
blockId: block_id,
connections: params.connections,
})
}
@@ -827,12 +867,16 @@ export function handleInsertIntoSubflowOperation(
return
}
// Moving existing block into subflow - just update parent
// Moving existing block into subflow update parent and reset position.
// Position must be reset because React Flow uses coordinates relative to
// the parent container; keeping the old absolute position would place the
// block far outside the container's bounds.
existingBlock.data = {
...existingBlock.data,
parentId: subflowId,
extent: 'parent' as const,
}
existingBlock.position = { x: 0, y: 0 }
// Update inputs if provided (with validation)
if (params.inputs) {
@@ -853,6 +897,8 @@ export function handleInsertIntoSubflowOperation(
sanitizedValue = normalizeArrayWithIds(value)
}
sanitizedValue = normalizeConditionRouterIds(block_id, key, sanitizedValue)
// Special handling for tools - normalize and filter disallowed
if (key === 'tools' && Array.isArray(value)) {
sanitizedValue = filterDisallowedTools(
@@ -869,9 +915,10 @@ export function handleInsertIntoSubflowOperation(
}
if (!existingBlock.subBlocks[key]) {
const subBlockDef = getBlock(existingBlock.type)?.subBlocks.find((sb) => sb.id === key)
existingBlock.subBlocks[key] = {
id: key,
type: 'short-input',
type: subBlockDef?.type || 'short-input',
value: sanitizedValue,
}
} else {
@@ -1006,12 +1053,25 @@ export function handleExtractFromSubflowOperation(
})
}
// Remove parent relationship
// Convert from relative (to container) to absolute position so the block
// appears at roughly the same visual location after extraction. This avoids
// needing targeted layout to reposition it — extracted blocks often lose
// their edges to siblings still in the container, making them disconnected
// and causing layout to stack them at layer 0.
const container = modifiedState.blocks[subflowId]
if (container?.position && block.position) {
block.position = {
x: (container.position.x ?? 0) + (block.position.x ?? 0),
y: (container.position.y ?? 0) + (block.position.y ?? 0),
}
} else {
// Fallback to (0,0) which signals to blocksNeedingLayout in index.ts
// that this block requires targeted layout repositioning.
block.position = { x: 0, y: 0 }
}
if (block.data) {
block.data.parentId = undefined
block.data.extent = undefined
}
// Note: We keep the block and its edges, just remove parent relationship
// The block becomes a root-level block
}

View File

@@ -42,6 +42,7 @@ export type SkippedItemType =
| 'tool_not_allowed'
| 'invalid_edge_target'
| 'invalid_edge_source'
| 'invalid_edge_scope'
| 'invalid_source_handle'
| 'invalid_target_handle'
| 'invalid_subblock_field'

View File

@@ -0,0 +1,90 @@
/**
* @vitest-environment node
*/
import { describe, expect, it, vi } from 'vitest'
import { normalizeConditionRouterIds } from './builders'
import { validateInputsForBlock } from './validation'
// Schema stubs for the two block types under test; only the subBlocks that
// the validators consult are included.
const conditionBlockConfig = {
  type: 'condition',
  name: 'Condition',
  outputs: {},
  subBlocks: [{ id: 'conditions', type: 'condition-input' }],
}
const routerBlockConfig = {
  type: 'router_v2',
  name: 'Router',
  outputs: {},
  subBlocks: [{ id: 'routes', type: 'router-input' }],
}
// Registry stub resolving just these two types; anything else is undefined.
vi.mock('@/blocks/registry', () => ({
  getBlock: (type: string) =>
    type === 'condition'
      ? conditionBlockConfig
      : type === 'router_v2'
        ? routerBlockConfig
        : undefined,
}))
// Covers condition-input validation: arbitrary item ids are accepted (they
// are canonicalized later by normalizeConditionRouterIds), while non-array
// values are rejected with a descriptive error.
describe('validateInputsForBlock', () => {
  it('accepts condition-input arrays with arbitrary item ids', () => {
    const result = validateInputsForBlock(
      'condition',
      {
        conditions: JSON.stringify([
          { id: 'cond-1-if', title: 'if', value: 'true' },
          { id: 'cond-1-else', title: 'else', value: '' },
        ]),
      },
      'condition-1'
    )
    expect(result.validInputs.conditions).toBeDefined()
    expect(result.errors).toHaveLength(0)
  })
  it('rejects non-array condition-input values', () => {
    const result = validateInputsForBlock('condition', { conditions: 'not-json' }, 'condition-1')
    expect(result.validInputs.conditions).toBeUndefined()
    expect(result.errors).toHaveLength(1)
    expect(result.errors[0]?.error).toContain('expected a JSON array')
  })
})
// Verifies that condition branch ids and router route ids are rewritten to
// the canonical block-scoped format regardless of the ids the caller sent,
// and that unrelated subblock keys pass through untouched.
describe('normalizeConditionRouterIds', () => {
  it('assigns canonical block-scoped ids to condition branches', () => {
    const input = JSON.stringify([
      { id: 'whatever', title: 'if', value: 'true' },
      { id: 'anything', title: 'else if', value: 'false' },
      { id: 'doesnt-matter', title: 'else', value: '' },
    ])
    const result = normalizeConditionRouterIds('block-1', 'conditions', input)
    const parsed = JSON.parse(result as string)
    expect(parsed[0].id).toBe('block-1-if')
    expect(parsed[1].id).toBe('block-1-else-if-0')
    expect(parsed[2].id).toBe('block-1-else')
  })
  it('assigns canonical block-scoped ids to router routes', () => {
    const input = [
      { id: 'route-a', title: 'Support', value: 'support query' },
      { id: 'route-b', title: 'Sales', value: 'sales query' },
    ]
    const result = normalizeConditionRouterIds('block-1', 'routes', input)
    const arr = result as any[]
    expect(arr[0].id).toBe('block-1-route1')
    expect(arr[1].id).toBe('block-1-route2')
  })
  it('passes through non-condition/router keys unchanged', () => {
    const input = 'some value'
    expect(normalizeConditionRouterIds('block-1', 'code', input)).toBe(input)
  })
})

View File

@@ -244,6 +244,35 @@ export function validateValueForSubBlockType(
return { valid: true, value }
}
case 'condition-input':
case 'router-input': {
const parsedValue =
typeof value === 'string'
? (() => {
try {
return JSON.parse(value)
} catch {
return null
}
})()
: value
if (!Array.isArray(parsedValue)) {
return {
valid: false,
error: {
blockId,
blockType,
field: fieldName,
value,
error: `Invalid ${type} value for field "${fieldName}" - expected a JSON array`,
},
}
}
return { valid: true, value }
}
case 'tool-input': {
// Should be an array of tool objects
if (!Array.isArray(value)) {

View File

@@ -0,0 +1,85 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import type { BlockState } from '@/stores/workflows/workflow/types'
import {
collectDynamicHandleTopologySignatures,
getChangedDynamicHandleBlockIds,
getConditionRows,
getDynamicHandleTopologySignature,
getRouterRows,
} from './dynamic-handle-topology'
// Unit coverage for the dynamic-handle topology helpers: canonical fallback
// rows, signature construction from persisted row ids, and change detection
// across two block-state snapshots.
describe('dynamic handle topology', () => {
  it('falls back to canonical condition rows when value is empty', () => {
    expect(getConditionRows('condition-1', null)).toEqual([
      { id: 'condition-1-if', title: 'if', value: '' },
      { id: 'condition-1-else', title: 'else', value: '' },
    ])
  })
  it('falls back to canonical router rows when value is empty', () => {
    expect(getRouterRows('router-1', null)).toEqual([{ id: 'router-1-route1', value: '' }])
  })
  it('builds topology signatures from condition ids', () => {
    const block = {
      id: 'condition-1',
      type: 'condition',
      subBlocks: {
        conditions: {
          id: 'conditions',
          type: 'condition-input',
          value: JSON.stringify([
            { id: 'condition-1-if', title: 'if', value: 'true' },
            { id: 'condition-1-else', title: 'else', value: '' },
          ]),
        },
      },
    } as BlockState
    expect(getDynamicHandleTopologySignature(block)).toBe(
      'condition:condition-1-if|condition-1-else'
    )
  })
  it('detects topology changes only for changed dynamic-handle blocks', () => {
    const previous = new Map<string, string>([
      ['condition-1', 'condition:condition-1-if|condition-1-else'],
    ])
    const nextBlocks = {
      'condition-1': {
        id: 'condition-1',
        type: 'condition',
        name: 'Condition 1',
        position: { x: 0, y: 0 },
        enabled: true,
        subBlocks: {
          conditions: {
            id: 'conditions',
            type: 'condition-input',
            value: JSON.stringify([
              { id: 'condition-1-if', title: 'if', value: 'true' },
              { id: 'condition-1-else-if-0', title: 'else if', value: 'false' },
              { id: 'condition-1-else', title: 'else', value: '' },
            ]),
          },
        },
        outputs: {},
      },
      'function-1': {
        id: 'function-1',
        type: 'function',
        name: 'Function 1',
        position: { x: 0, y: 0 },
        enabled: true,
        subBlocks: {},
        outputs: {},
      },
    } as Record<string, BlockState>
    const next = collectDynamicHandleTopologySignatures(nextBlocks)
    // Only the condition block changed (an 'else if' branch was added); the
    // function block has no dynamic handles and is never reported.
    expect(getChangedDynamicHandleBlockIds(previous, next)).toEqual(['condition-1'])
  })
})

View File

@@ -0,0 +1,143 @@
import type { BlockState } from '@/stores/workflows/workflow/types'
/** A single branch row of a condition block ('if' / 'else if' / 'else'). */
export interface ConditionRow {
  id: string
  title: string
  value: string
}
/** A single route row of a router_v2 block. */
export interface RouterRow {
  id: string
  value: string
}
/**
 * Coerces a raw subblock value into an array: accepts either an actual array
 * (returned as-is) or a JSON-encoded array string. Anything else — including
 * malformed JSON or JSON that parses to a non-array — yields null.
 */
function parseStructuredValue(value: unknown): unknown[] | null {
  if (Array.isArray(value)) {
    return value
  }
  if (typeof value !== 'string') {
    return null
  }
  try {
    const decoded = JSON.parse(value)
    return Array.isArray(decoded) ? decoded : null
  } catch {
    return null
  }
}
/**
 * Type guard: true for the two block types whose outgoing handles are
 * derived from data (condition branches / router routes).
 */
export function isDynamicHandleBlockType(
  type: string | undefined
): type is 'condition' | 'router_v2' {
  switch (type) {
    case 'condition':
    case 'router_v2':
      return true
    default:
      return false
  }
}
/**
 * Maps a dynamic-handle block type to the id of the subblock that stores its
 * handle rows ('conditions' / 'routes'); null for every other block type.
 */
export function getDynamicHandleSubblockId(
  blockType: string | undefined
): 'conditions' | 'routes' | null {
  switch (blockType) {
    case 'condition':
      return 'conditions'
    case 'router_v2':
      return 'routes'
    default:
      return null
  }
}
/**
 * Maps a dynamic-handle block type to the subblock *type* string used by its
 * handle-row editor ('condition-input' / 'router-input'); null otherwise.
 */
export function getDynamicHandleSubblockType(
  blockType: string | undefined
): 'condition-input' | 'router-input' | null {
  switch (blockType) {
    case 'condition':
      return 'condition-input'
    case 'router_v2':
      return 'router-input'
    default:
      return null
  }
}
/**
 * True when `subblockId` is exactly the handle-row subblock for the given
 * block type (e.g. 'conditions' on a condition block).
 */
export function isDynamicHandleSubblock(
  blockType: string | undefined,
  subblockId: string
): boolean {
  const expected = getDynamicHandleSubblockId(blockType)
  return expected === subblockId
}
/**
 * Decodes a condition block's persisted `conditions` value into ConditionRow
 * objects. Titles are positional (first row 'if', last 'else', middle rows
 * 'else if'); items missing an id get a `${blockId}-cond-${index}` fallback,
 * and non-string values are coerced to ''. When the value is empty or
 * unparsable, the canonical minimal if/else pair is returned.
 */
export function getConditionRows(blockId: string, value: unknown): ConditionRow[] {
  const parsed = parseStructuredValue(value)
  if (!parsed || parsed.length === 0) {
    // Canonical minimal topology: a bare if/else pair.
    return [
      { id: `${blockId}-if`, title: 'if', value: '' },
      { id: `${blockId}-else`, title: 'else', value: '' },
    ]
  }
  const lastIndex = parsed.length - 1
  return parsed.map((item, index) => {
    const row = item as { id?: string; value?: unknown }
    return {
      id: row?.id ?? `${blockId}-cond-${index}`,
      title: index === 0 ? 'if' : index === lastIndex ? 'else' : 'else if',
      value: typeof row?.value === 'string' ? row.value : '',
    }
  })
}
/**
 * Decodes a router_v2 block's persisted `routes` value into RouterRow
 * objects. Items missing an id get a 1-based `${blockId}-route${n}` fallback.
 * When the value is empty or unparsable, a single canonical route is
 * returned.
 */
export function getRouterRows(blockId: string, value: unknown): RouterRow[] {
  const parsed = parseStructuredValue(value)
  if (parsed) {
    const rows = parsed.map((item, index) => {
      const routeItem = item as { id?: string; value?: unknown }
      return {
        id: routeItem?.id ?? `${blockId}-route${index + 1}`,
        // Only accept string values so RouterRow.value really is a string —
        // mirrors the guard in getConditionRows; previously a non-string
        // value in malformed persisted data leaked through `?? ''` untouched.
        value: typeof routeItem?.value === 'string' ? routeItem.value : '',
      }
    })
    if (rows.length > 0) {
      return rows
    }
  }
  return [{ id: `${blockId}-route1`, value: '' }]
}
/**
 * Produces a stable signature of a block's dynamic-handle topology — the
 * ordered branch/route ids joined with '|' and prefixed by the kind — used by
 * getChangedDynamicHandleBlockIds to detect topology changes. Returns null
 * for block types without dynamic handles.
 */
export function getDynamicHandleTopologySignature(block: BlockState): string | null {
  switch (block.type) {
    case 'condition': {
      const ids = getConditionRows(block.id, block.subBlocks?.conditions?.value).map(
        (row) => row.id
      )
      return `condition:${ids.join('|')}`
    }
    case 'router_v2': {
      const ids = getRouterRows(block.id, block.subBlocks?.routes?.value).map((row) => row.id)
      return `router:${ids.join('|')}`
    }
    default:
      return null
  }
}
/**
 * Computes the topology signature for every block that has one, keyed by
 * block id. Blocks without dynamic handles are omitted from the map.
 */
export function collectDynamicHandleTopologySignatures(
  blocks: Record<string, BlockState>
): Map<string, string> {
  const entries: Array<[string, string]> = []
  for (const [blockId, block] of Object.entries(blocks)) {
    const signature = getDynamicHandleTopologySignature(block)
    if (signature !== null) {
      entries.push([blockId, signature])
    }
  }
  return new Map(entries)
}
/**
 * Lists block ids whose signature in `next` is new or differs from
 * `previous`, in `next`'s iteration order. Blocks present only in `previous`
 * (i.e. removed blocks) are not reported.
 */
export function getChangedDynamicHandleBlockIds(
  previous: Map<string, string>,
  next: Map<string, string>
): string[] {
  const changed: string[] = []
  next.forEach((signature, blockId) => {
    if (previous.get(blockId) !== signature) {
      changed.push(blockId)
    }
  })
  return changed
}

View File

@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { client } from '@/lib/auth/auth-client'
import { useOperationQueueStore } from '@/stores/operation-queue/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation'
const logger = createLogger('WorkflowSocketOperations')
@@ -76,11 +77,21 @@ export async function enqueueReplaceWorkflowState({
state,
operationId,
}: EnqueueReplaceStateArgs): Promise<string> {
const { state: validatedState, warnings } = normalizeWorkflowState(state)
if (warnings.length > 0) {
logger.warn('Normalized state before enqueuing replace-state', {
workflowId,
warningCount: warnings.length,
warnings,
})
}
return enqueueWorkflowOperation({
workflowId,
operation: 'replace-state',
target: 'workflow',
payload: { state },
payload: { state: validatedState },
operationId,
})
}

View File

@@ -1854,7 +1854,7 @@ export const useCopilotStore = create<CopilotStore>()(
}
// Apply to main workflow store
useWorkflowStore.setState({
useWorkflowStore.getState().replaceWorkflowState({
blocks: reverted.blocks ?? {},
edges: reverted.edges ?? [],
loops: reverted.loops ?? {},

View File

@@ -190,22 +190,6 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
logger.warn('Failed to broadcast workflow state (non-blocking)', { error })
})
// Fire and forget: persist to database (don't await)
persistWorkflowStateToServer(activeWorkflowId, candidateState)
.then((persisted) => {
if (!persisted) {
logger.warn('Failed to persist copilot edits (state already applied locally)')
// Don't revert - user can retry or state will sync on next save
} else {
logger.info('Workflow diff persisted to database', {
workflowId: activeWorkflowId,
})
}
})
.catch((error) => {
logger.warn('Failed to persist workflow state (non-blocking)', { error })
})
// Emit event for undo/redo recording
if (!options?.skipRecording) {
window.dispatchEvent(

View File

@@ -332,7 +332,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
return
}
useWorkflowStore.setState(workflowState)
useWorkflowStore.getState().replaceWorkflowState(workflowState)
useSubBlockStore.getState().initializeFromWorkflow(workflowId, workflowState.blocks || {})
if (workflowData?.variables && typeof workflowData.variables === 'object') {
@@ -637,7 +637,7 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
useSubBlockStore.setState({ workflowValues: originalState.subBlockValues })
if (originalState.workflowStoreState) {
useWorkflowStore.setState(originalState.workflowStoreState)
useWorkflowStore.getState().replaceWorkflowState(originalState.workflowStoreState)
logger.info(`Restored workflow store state for workflow ${id}`)
}

View File

@@ -6,10 +6,10 @@ import { remapConditionBlockIds, remapConditionEdgeHandle } from '@/lib/workflow
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
import { buildDefaultCanonicalModes } from '@/lib/workflows/subblocks/visibility'
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getBlock } from '@/blocks'
import { isAnnotationOnlyBlock, normalizeName } from '@/executor/constants'
import { normalizeName } from '@/executor/constants'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { validateEdges } from '@/stores/workflows/workflow/edge-validation'
import type {
BlockState,
Loop,
@@ -23,30 +23,11 @@ import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
/** Threshold to detect viewport-based offsets vs small duplicate offsets */
const LARGE_OFFSET_THRESHOLD = 300
/**
* Checks if an edge is valid (source and target exist, not annotation-only, target is not a trigger)
*/
function isValidEdge(
edge: Edge,
blocks: Record<string, { type: string; triggerMode?: boolean }>
): boolean {
const sourceBlock = blocks[edge.source]
const targetBlock = blocks[edge.target]
if (!sourceBlock || !targetBlock) return false
if (isAnnotationOnlyBlock(sourceBlock.type)) return false
if (isAnnotationOnlyBlock(targetBlock.type)) return false
if (TriggerUtils.isTriggerBlock(targetBlock)) return false
return true
}
/**
* Filters edges to only include valid ones (target exists and is not a trigger block)
*/
export function filterValidEdges(
edges: Edge[],
blocks: Record<string, { type: string; triggerMode?: boolean }>
): Edge[] {
return edges.filter((edge) => isValidEdge(edge, blocks))
export function filterValidEdges(edges: Edge[], blocks: Record<string, BlockState>): Edge[] {
return validateEdges(edges, blocks).valid
}
export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] {

View File

@@ -0,0 +1,86 @@
import type { Edge } from 'reactflow'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { isAnnotationOnlyBlock } from '@/executor/constants'
import type { BlockState } from '@/stores/workflows/workflow/types'
/** An edge removed during validation, with a human-readable explanation. */
export interface DroppedEdge {
  edge: Edge
  reason: string
}
/** Result of validateEdges: edges safe to keep plus dropped edges with reasons. */
export interface EdgeValidationResult {
  valid: Edge[]
  dropped: DroppedEdge[]
}
/** True when the block is a subflow container ('loop' or 'parallel'). */
function isContainerBlock(block: BlockState | undefined): boolean {
  switch (block?.type) {
    case 'loop':
    case 'parallel':
      return true
    default:
      return false
  }
}
/** Returns the id of the block's parent container, or null for root-level blocks. */
function getParentId(block: BlockState | undefined): string | null {
  const parent = block?.data?.parentId
  return parent == null ? null : parent
}
/**
 * Checks whether an edge illegally crosses subflow scopes. Returns null when
 * the edge is allowed: both endpoints share the same parent (or are both
 * root-level), or the edge runs between a container and one of its own
 * children (e.g. a loop's 'loop-start-source' handle into its body). Any
 * other cross-scope edge yields a drop reason string.
 */
function getScopeDropReason(edge: Edge, blocks: Record<string, BlockState>): string | null {
  const sourceBlock = blocks[edge.source]
  const targetBlock = blocks[edge.target]
  if (!sourceBlock || !targetBlock) {
    return 'edge references a missing block'
  }
  const sourceParent = getParentId(sourceBlock)
  const targetParent = getParentId(targetBlock)
  // Same scope: both root-level, or children of the same container.
  if (sourceParent === targetParent) {
    return null
  }
  // Container -> its own child.
  const containerIntoChild = isContainerBlock(sourceBlock) && targetParent === edge.source
  // Child -> its own container.
  const childIntoContainer = isContainerBlock(targetBlock) && sourceParent === edge.target
  if (containerIntoChild || childIntoContainer) {
    return null
  }
  return `blocks are in different scopes (${sourceParent ?? 'root'} -> ${targetParent ?? 'root'})`
}
export function validateEdges(
edges: Edge[],
blocks: Record<string, BlockState>
): EdgeValidationResult {
const valid: Edge[] = []
const dropped: DroppedEdge[] = []
for (const edge of edges) {
const sourceBlock = blocks[edge.source]
const targetBlock = blocks[edge.target]
if (!sourceBlock || !targetBlock) {
dropped.push({ edge, reason: 'edge references a missing block' })
continue
}
if (isAnnotationOnlyBlock(sourceBlock.type) || isAnnotationOnlyBlock(targetBlock.type)) {
dropped.push({ edge, reason: 'edge references an annotation-only block' })
continue
}
if (TriggerUtils.isTriggerBlock(targetBlock)) {
dropped.push({ edge, reason: 'trigger blocks cannot be edge targets' })
continue
}
const scopeDropReason = getScopeDropReason(edge, blocks)
if (scopeDropReason) {
dropped.push({ edge, reason: scopeDropReason })
continue
}
valid.push(edge)
}
return { valid, dropped }
}

View File

@@ -792,6 +792,40 @@ describe('workflow store', () => {
})
})
describe('syncDynamicHandleSubblockValue', () => {
  // Shared condition topology fixture used by the sync test below.
  const topology = [
    { id: 'condition-1-if', title: 'if', value: 'true' },
    { id: 'condition-1-else', title: 'else', value: '' },
  ]

  it('should sync condition topology values into the workflow store', () => {
    addBlock('condition-1', 'condition', 'Condition 1', { x: 0, y: 0 })
    const serialized = JSON.stringify(topology)

    useWorkflowStore
      .getState()
      .syncDynamicHandleSubblockValue('condition-1', 'conditions', serialized)

    // The store should materialize the subblock with the handle-topology type
    // and carry the serialized value through unchanged.
    const conditionsSubBlock =
      useWorkflowStore.getState().blocks['condition-1'].subBlocks.conditions
    expect(conditionsSubBlock?.type).toBe('condition-input')
    expect(conditionsSubBlock?.value).toBe(serialized)
  })

  it('should ignore non-topology subblock updates', () => {
    addBlock('function-1', 'function', 'Function 1', { x: 0, y: 0 })
    const snapshot = useWorkflowStore.getState().blocks['function-1']

    // 'code' is not a dynamic-handle subblock, so this must be a no-op.
    useWorkflowStore.getState().syncDynamicHandleSubblockValue('function-1', 'code', 'return 1')

    expect(useWorkflowStore.getState().blocks['function-1']).toEqual(snapshot)
  })
})
describe('getWorkflowState', () => {
it('should return current workflow state', () => {
const { getWorkflowState } = useWorkflowStore.getState()

View File

@@ -3,6 +3,10 @@ import type { Edge } from 'reactflow'
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import {
getDynamicHandleSubblockType,
isDynamicHandleSubblock,
} from '@/lib/workflows/dynamic-handle-topology'
import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -27,6 +31,7 @@ import {
isBlockProtected,
wouldCreateCycle,
} from '@/stores/workflows/workflow/utils'
import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation'
const logger = createLogger('WorkflowStore')
@@ -511,15 +516,25 @@ export const useWorkflowStore = create<WorkflowStore>()(
options?: { updateLastSaved?: boolean }
) => {
set((state) => {
const nextBlocks = workflowState.blocks || {}
const nextEdges = filterValidEdges(workflowState.edges || [], nextBlocks)
const normalization = normalizeWorkflowState(workflowState)
const nextState = normalization.state
if (normalization.warnings.length > 0) {
logger.warn('Normalized workflow state during replaceWorkflowState', {
warningCount: normalization.warnings.length,
warnings: normalization.warnings,
})
}
const nextBlocks = nextState.blocks || {}
const nextEdges = nextState.edges || []
const nextLoops =
Object.keys(workflowState.loops || {}).length > 0
? workflowState.loops
Object.keys(nextState.loops || {}).length > 0
? nextState.loops
: generateLoopBlocks(nextBlocks)
const nextParallels =
Object.keys(workflowState.parallels || {}).length > 0
? workflowState.parallels
Object.keys(nextState.parallels || {}).length > 0
? nextState.parallels
: generateParallelBlocks(nextBlocks)
return {
@@ -528,15 +543,15 @@ export const useWorkflowStore = create<WorkflowStore>()(
edges: nextEdges,
loops: nextLoops,
parallels: nextParallels,
deploymentStatuses: workflowState.deploymentStatuses || state.deploymentStatuses,
deploymentStatuses: nextState.deploymentStatuses || state.deploymentStatuses,
needsRedeployment:
workflowState.needsRedeployment !== undefined
? workflowState.needsRedeployment
nextState.needsRedeployment !== undefined
? nextState.needsRedeployment
: state.needsRedeployment,
lastSaved:
options?.updateLastSaved === true
? Date.now()
: (workflowState.lastSaved ?? state.lastSaved),
: (nextState.lastSaved ?? state.lastSaved),
}
})
},
@@ -856,6 +871,48 @@ export const useWorkflowStore = create<WorkflowStore>()(
get().updateLastSaved()
},
// Mirrors a dynamic-handle subblock value (e.g. a condition topology) into the
// workflow store so handle layout stays in sync with the subblock store.
// No-ops for blocks/subblocks that do not drive dynamic handles, and skips the
// state update entirely when the value and subblock type are already current.
syncDynamicHandleSubblockValue: (blockId: string, subblockId: string, value: unknown) => {
  set((state) => {
    const targetBlock = state.blocks[blockId]
    // Only subblocks registered as dynamic-handle sources are mirrored here.
    if (!targetBlock || !isDynamicHandleSubblock(targetBlock.type, subblockId)) {
      return state
    }
    const handleType = getDynamicHandleSubblockType(targetBlock.type)
    if (!handleType) {
      return state
    }

    const existing = targetBlock.subBlocks?.[subblockId]
    const existingValue = existing?.value
    // Structural values are compared by serialization; primitives by identity.
    const compareSerialized = typeof existingValue === 'object' || typeof value === 'object'
    const unchanged = compareSerialized
      ? JSON.stringify(existingValue) === JSON.stringify(value)
      : existingValue === value
    if (unchanged && existing?.type === handleType) {
      // Nothing to do — avoid churning the store with an identical state.
      return state
    }

    return {
      blocks: {
        ...state.blocks,
        [blockId]: {
          ...targetBlock,
          subBlocks: {
            ...targetBlock.subBlocks,
            [subblockId]: {
              id: subblockId,
              type: handleType,
              value: value as SubBlockState['value'],
            },
          },
        },
      },
    }
  })
},
setBlockTriggerMode: (id: string, triggerMode: boolean) => {
set((state) => ({
blocks: {
@@ -1055,36 +1112,20 @@ export const useWorkflowStore = create<WorkflowStore>()(
return
}
// Preserving the workflow-specific deployment status if it exists
const deploymentStatus = useWorkflowRegistry
.getState()
.getWorkflowDeploymentStatus(activeWorkflowId)
const newState = {
blocks: deployedState.blocks,
edges: filterValidEdges(deployedState.edges ?? [], deployedState.blocks),
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
get().replaceWorkflowState({
...deployedState,
needsRedeployment: false,
// Keep existing deployment statuses and update for the active workflow if needed
deploymentStatuses: {
...get().deploymentStatuses,
...(deploymentStatus
? {
[activeWorkflowId]: deploymentStatus,
}
: {}),
...(deploymentStatus ? { [activeWorkflowId]: deploymentStatus } : {}),
},
}
})
// Update the main workflow state
set(newState)
// Initialize subblock store with values from deployed state
const subBlockStore = useSubBlockStore.getState()
const values: Record<string, Record<string, any>> = {}
// Extract subblock values from deployed blocks
Object.entries(deployedState.blocks).forEach(([blockId, block]) => {
values[blockId] = {}
Object.entries(block.subBlocks || {}).forEach(([subBlockId, subBlock]) => {
@@ -1092,10 +1133,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
})
})
// Update subblock store with deployed values
useSubBlockStore.setState({
workflowValues: {
...subBlockStore.workflowValues,
...useSubBlockStore.getState().workflowValues,
[activeWorkflowId]: values,
},
})

View File

@@ -212,6 +212,7 @@ export interface WorkflowActions {
}
setBlockAdvancedMode: (id: string, advancedMode: boolean) => void
setBlockCanonicalMode: (id: string, canonicalId: string, mode: 'basic' | 'advanced') => void
syncDynamicHandleSubblockValue: (blockId: string, subblockId: string, value: unknown) => void
setBlockTriggerMode: (id: string, triggerMode: boolean) => void
updateBlockLayoutMetrics: (id: string, dimensions: { width: number; height: number }) => void
triggerUpdate: () => void

View File

@@ -0,0 +1,67 @@
import { validateEdges } from '@/stores/workflows/workflow/edge-validation'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
/**
 * Outcome of normalizing a persisted workflow state: the repaired state plus
 * one human-readable warning per block, edge, or parent reference that was
 * dropped or cleared during normalization.
 */
export interface NormalizationResult {
  // Normalized state: invalid blocks/edges removed, container metadata regenerated.
  state: WorkflowState
  // Messages describing each repair, suitable for logging.
  warnings: string[]
}
/** True when the block type is a container that can host child blocks. */
function isContainerType(type: string | undefined): boolean {
  const containerTypes = ['loop', 'parallel']
  return type !== undefined && containerTypes.includes(type)
}
/**
 * Normalizes a persisted workflow state before loading it into the store.
 *
 * Repairs performed, in order:
 * 1. Blocks missing a type or name are dropped.
 * 2. Parent references that point at a non-container, a missing block, or the
 *    block itself are cleared (along with `extent`); valid parent references
 *    are forced to `extent: 'parent'`.
 * 3. Edges are re-validated against the surviving blocks.
 * 4. Loop/parallel container metadata is regenerated from the blocks.
 *
 * The input state is not mutated; blocks are deep-cloned first.
 *
 * @param workflowState Raw state as loaded from persistence.
 * @returns The normalized state and a warning per repair that was applied.
 */
export function normalizeWorkflowState(workflowState: WorkflowState): NormalizationResult {
  const warnings: string[] = []
  const blocks = structuredClone(workflowState.blocks || {})

  // Pass 1: remove blocks that lack an identity we can render or execute.
  Object.entries(blocks).forEach(([blockId, block]) => {
    if (!block?.type || !block?.name) {
      warnings.push(`Dropped invalid block "${blockId}" because it is missing type or name`)
      delete blocks[blockId]
    }
  })

  // Pass 2: repair parent/child container relationships on the survivors.
  Object.entries(blocks).forEach(([blockId, block]) => {
    const parentId = block.data?.parentId
    if (!parentId) {
      return
    }
    const parent = blocks[parentId]
    const parentIsUsable = Boolean(parent && isContainerType(parent.type)) && parentId !== blockId
    if (!parentIsUsable) {
      // Parent is missing, not a container, or the block itself — detach.
      warnings.push(`Cleared invalid parentId for block "${blockId}"`)
      block.data = {
        ...(block.data || {}),
        parentId: undefined,
        extent: undefined,
      }
      return
    }
    // Children of containers must be constrained to the parent's bounds.
    if (block.data?.extent !== 'parent') {
      block.data = {
        ...(block.data || {}),
        extent: 'parent',
      }
    }
  })

  // Pass 3: edges may now reference dropped blocks or broken scopes.
  const { valid, dropped } = validateEdges(workflowState.edges || [], blocks)
  for (const { edge, reason } of dropped) {
    warnings.push(`Dropped edge "${edge.id}": ${reason}`)
  }

  return {
    state: {
      ...workflowState,
      blocks,
      edges: valid,
      // Container metadata is always regenerated so it stays canonical.
      loops: generateLoopBlocks(blocks),
      parallels: generateParallelBlocks(blocks),
    },
    warnings,
  }
}