feat(copy-paste): allow cross workflow selection, paste, move for blocks (#2649)

* feat(copy-paste): allow cross workflow selection, paste, move for blocks

* fix drag options

* add keyboard and mouse controls into docs

* refactor sockets and undo/redo for batch additions and removals

* fix tests

* cleanup more code

* fix perms issue

* fix subflow copy/paste

* remove log file

* fit paste in viewport bounds

* fix deselection
This commit is contained in:
Vikhyath Mondreti
2025-12-31 02:47:06 -08:00
committed by GitHub
parent fb148c6203
commit bf5d0a5573
30 changed files with 2125 additions and 2058 deletions

View File

@@ -0,0 +1,64 @@
---
title: Keyboard Shortcuts
description: Master the workflow canvas with keyboard shortcuts and mouse controls
---
import { Callout } from 'fumadocs-ui/components/callout'
Speed up your workflow building with these keyboard shortcuts and mouse controls. All shortcuts work when the canvas is focused (not when typing in an input field).
<Callout type="info">
**Mod** refers to `Cmd` on macOS and `Ctrl` on Windows/Linux.
</Callout>
## Canvas Controls
### Mouse Controls
| Action | Control |
|--------|---------|
| Pan/move canvas | Left-drag on empty space |
| Pan/move canvas | Scroll or trackpad |
| Select multiple blocks | Right-drag to draw selection box |
| Drag block | Left-drag on block header |
| Add to selection | `Mod` + click on blocks |
### Workflow Actions
| Shortcut | Action |
|----------|--------|
| `Mod` + `Enter` | Run workflow (or cancel if running) |
| `Mod` + `Z` | Undo |
| `Mod` + `Shift` + `Z` | Redo |
| `Mod` + `C` | Copy selected blocks |
| `Mod` + `V` | Paste blocks |
| `Delete` or `Backspace` | Delete selected blocks or edges |
| `Shift` + `L` | Auto-layout canvas |
## Panel Navigation
These shortcuts switch between panel tabs on the right side of the canvas.
| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |
## Global Navigation
| Shortcut | Action |
|----------|--------|
| `Mod` + `K` | Open search |
| `Mod` + `Shift` + `A` | Add new agent workflow |
| `Mod` + `Y` | Go to templates |
| `Mod` + `L` | Go to logs |
## Utility
| Shortcut | Action |
|----------|--------|
| `Mod` + `D` | Clear terminal console |
| `Mod` + `E` | Clear notifications |

View File

@@ -14,7 +14,8 @@
"execution",
"permissions",
"sdks",
"self-hosting"
"self-hosting",
"./keyboard-shortcuts/index"
],
"defaultOpen": false
}

View File

@@ -61,7 +61,7 @@ export interface SubflowNodeData {
*/
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const { collaborativeBatchRemoveBlocks } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
const currentWorkflow = useCurrentWorkflow()
@@ -184,7 +184,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
variant='ghost'
onClick={(e) => {
e.stopPropagation()
collaborativeRemoveBlock(id)
collaborativeBatchRemoveBlocks([id])
}}
className='h-[14px] w-[14px] p-0 opacity-0 transition-opacity duration-100 group-hover:opacity-100'
>

View File

@@ -4,8 +4,13 @@ import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, prepareDuplicateBlockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const DEFAULT_DUPLICATE_OFFSET = { x: 50, y: 50 }
/**
* Props for the ActionBar component
*/
@@ -27,11 +32,39 @@ interface ActionBarProps {
export const ActionBar = memo(
function ActionBar({ blockId, blockType, disabled = false }: ActionBarProps) {
const {
collaborativeRemoveBlock,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeToggleBlockEnabled,
collaborativeDuplicateBlock,
collaborativeToggleBlockHandles,
} = useCollaborativeWorkflow()
const { activeWorkflowId } = useWorkflowRegistry()
const blocks = useWorkflowStore((state) => state.blocks)
const subBlockStore = useSubBlockStore()
/** Clones the current block (including its sub-block values) via the batch add API. */
const handleDuplicateBlock = useCallback(() => {
const original = blocks[blockId]
if (!original) return

const duplicateId = crypto.randomUUID()
const duplicateName = getUniqueBlockName(original.name, blocks)
// Pull the source block's current sub-block values for the active workflow.
const existingValues = subBlockStore.workflowValues[activeWorkflowId || '']?.[blockId] || {}

const prepared = prepareDuplicateBlockState({
sourceBlock: original,
newId: duplicateId,
newName: duplicateName,
positionOffset: DEFAULT_DUPLICATE_OFFSET,
subBlockValues: existingValues,
})

collaborativeBatchAddBlocks([prepared.block], [], {}, {}, {
[duplicateId]: prepared.subBlockValues,
})
}, [
blockId,
blocks,
activeWorkflowId,
subBlockStore.workflowValues,
collaborativeBatchAddBlocks,
])
/**
* Optimized single store subscription for all block data
@@ -115,7 +148,7 @@ export const ActionBar = memo(
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeDuplicateBlock(blockId)
handleDuplicateBlock()
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
@@ -185,7 +218,7 @@ export const ActionBar = memo(
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeRemoveBlock(blockId)
collaborativeBatchRemoveBlocks([blockId])
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'

View File

@@ -60,7 +60,7 @@ import { usePanelEditorStore } from '@/stores/panel/editor/store'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { getUniqueBlockName } from '@/stores/workflows/utils'
import { getUniqueBlockName, prepareBlockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/** Lazy-loaded components for non-critical UI that can load after initial render */
@@ -77,6 +77,60 @@ const LazyOAuthRequiredModal = lazy(() =>
const logger = createLogger('Workflow')
const DEFAULT_PASTE_OFFSET = { x: 50, y: 50 }
/**
 * Computes the translation that moves the clipboard blocks' bounding-box
 * center onto the center of the visible canvas viewport.
 * Falls back to DEFAULT_PASTE_OFFSET when there is nothing to measure or
 * the canvas element cannot be located.
 */
function calculatePasteOffset(
  clipboard: {
    blocks: Record<string, { position: { x: number; y: number }; type: string; height?: number }>
  } | null,
  screenToFlowPosition: (pos: { x: number; y: number }) => { x: number; y: number }
): { x: number; y: number } {
  if (!clipboard) return DEFAULT_PASTE_OFFSET

  const clipboardBlocks = Object.values(clipboard.blocks)
  if (clipboardBlocks.length === 0) return DEFAULT_PASTE_OFFSET

  // Single pass over the blocks to build the bounding box, sizing each block
  // by type: containers use their default dimensions, regular blocks use the
  // fixed width and their measured height (floored at the minimum).
  let minX = Number.POSITIVE_INFINITY
  let minY = Number.POSITIVE_INFINITY
  let maxX = Number.NEGATIVE_INFINITY
  let maxY = Number.NEGATIVE_INFINITY

  for (const block of clipboardBlocks) {
    const isContainer = block.type === 'loop' || block.type === 'parallel'
    const width = isContainer
      ? CONTAINER_DIMENSIONS.DEFAULT_WIDTH
      : BLOCK_DIMENSIONS.FIXED_WIDTH
    const height = isContainer
      ? CONTAINER_DIMENSIONS.DEFAULT_HEIGHT
      : Math.max(block.height || BLOCK_DIMENSIONS.MIN_HEIGHT, BLOCK_DIMENSIONS.MIN_HEIGHT)

    minX = Math.min(minX, block.position.x)
    minY = Math.min(minY, block.position.y)
    maxX = Math.max(maxX, block.position.x + width)
    maxY = Math.max(maxY, block.position.y + height)
  }

  const clipboardCenter = { x: (minX + maxX) / 2, y: (minY + maxY) / 2 }

  // Without a rendered canvas element we cannot determine the viewport.
  const flowContainer = document.querySelector('.react-flow')
  if (!flowContainer) return DEFAULT_PASTE_OFFSET

  const rect = flowContainer.getBoundingClientRect()
  const viewportCenter = screenToFlowPosition({
    x: rect.width / 2,
    y: rect.height / 2,
  })

  return {
    x: viewportCenter.x - clipboardCenter.x,
    y: viewportCenter.y - clipboardCenter.y,
  }
}
/** Custom node types for ReactFlow. */
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
@@ -93,17 +147,13 @@ const edgeTypes: EdgeTypes = {
/** ReactFlow configuration constants. */
const defaultEdgeOptions = { type: 'custom' }
/** Tailwind classes for ReactFlow internal element styling */
const reactFlowStyles = [
// Z-index layering
'[&_.react-flow__edges]:!z-0',
'[&_.react-flow__node]:!z-[21]',
'[&_.react-flow__handle]:!z-[30]',
'[&_.react-flow__edge-labels]:!z-[60]',
// Light mode: transparent pane to show dots
'[&_.react-flow__pane]:!bg-transparent',
'[&_.react-flow__renderer]:!bg-transparent',
// Dark mode: solid background, hide dots
'dark:[&_.react-flow__pane]:!bg-[var(--bg)]',
'dark:[&_.react-flow__renderer]:!bg-[var(--bg)]',
'dark:[&_.react-flow__background]:hidden',
@@ -132,6 +182,8 @@ const WorkflowContent = React.memo(() => {
const [isCanvasReady, setIsCanvasReady] = useState(false)
const [potentialParentId, setPotentialParentId] = useState<string | null>(null)
const [selectedEdgeInfo, setSelectedEdgeInfo] = useState<SelectedEdgeInfo | null>(null)
const [isShiftPressed, setIsShiftPressed] = useState(false)
const [isSelectionDragActive, setIsSelectionDragActive] = useState(false)
const [isErrorConnectionDrag, setIsErrorConnectionDrag] = useState(false)
const [oauthModal, setOauthModal] = useState<{
provider: OAuthProvider
@@ -151,12 +203,25 @@ const WorkflowContent = React.memo(() => {
const addNotification = useNotificationStore((state) => state.addNotification)
const { workflows, activeWorkflowId, hydration, setActiveWorkflow } = useWorkflowRegistry(
const {
workflows,
activeWorkflowId,
hydration,
setActiveWorkflow,
copyBlocks,
preparePasteData,
hasClipboard,
clipboard,
} = useWorkflowRegistry(
useShallow((state) => ({
workflows: state.workflows,
activeWorkflowId: state.activeWorkflowId,
hydration: state.hydration,
setActiveWorkflow: state.setActiveWorkflow,
copyBlocks: state.copyBlocks,
preparePasteData: state.preparePasteData,
hasClipboard: state.hasClipboard,
clipboard: state.clipboard,
}))
)
@@ -336,16 +401,56 @@ const WorkflowContent = React.memo(() => {
}, [userPermissions, currentWorkflow.isSnapshotView])
const {
collaborativeAddBlock: addBlock,
collaborativeAddEdge: addEdge,
collaborativeRemoveBlock: removeBlock,
collaborativeRemoveEdge: removeEdge,
collaborativeUpdateBlockPosition,
collaborativeBatchUpdatePositions,
collaborativeUpdateParentId: updateParentId,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
undo,
redo,
} = useCollaborativeWorkflow()
/** Single-block convenience wrapper around the batched position-update API. */
const updateBlockPosition = useCallback(
(id: string, position: { x: number; y: number }) => {
const updates = [{ id, position }]
collaborativeBatchUpdatePositions(updates)
},
[collaborativeBatchUpdatePositions]
)
/**
 * Adds a single block (plus an optional auto-connect edge) through the batch
 * collaborative API, then focuses the new block in the editor panel.
 */
const addBlock = useCallback(
(
id: string,
type: string,
name: string,
position: { x: number; y: number },
data?: Record<string, unknown>,
parentId?: string,
extent?: 'parent',
autoConnectEdge?: Edge,
triggerMode?: boolean
) => {
// Fold parent/extent into the block's data payload when provided.
const blockData: Record<string, unknown> = { ...(data ?? {}) }
if (parentId) blockData.parentId = parentId
if (extent) blockData.extent = extent

const block = prepareBlockState({
id,
type,
name,
position,
data: blockData,
parentId,
extent,
triggerMode,
})

const edges = autoConnectEdge ? [autoConnectEdge] : []
collaborativeBatchAddBlocks([block], edges, {}, {}, {})
usePanelEditorStore.getState().setCurrentBlockId(id)
},
[collaborativeBatchAddBlocks]
)
const { activeBlockIds, pendingBlocks, isDebugging } = useExecutionStore(
useShallow((state) => ({
activeBlockIds: state.activeBlockIds,
@@ -419,7 +524,7 @@ const WorkflowContent = React.memo(() => {
const result = updateNodeParentUtil(
nodeId,
newParentId,
collaborativeUpdateBlockPosition,
updateBlockPosition,
updateParentId,
() => resizeLoopNodesWrapper()
)
@@ -443,7 +548,7 @@ const WorkflowContent = React.memo(() => {
},
[
getNodes,
collaborativeUpdateBlockPosition,
updateBlockPosition,
updateParentId,
blocks,
edgesForDisplay,
@@ -495,6 +600,54 @@ const WorkflowContent = React.memo(() => {
) {
event.preventDefault()
redo()
} else if ((event.ctrlKey || event.metaKey) && event.key === 'c') {
const selectedNodes = getNodes().filter((node) => node.selected)
if (selectedNodes.length > 0) {
event.preventDefault()
copyBlocks(selectedNodes.map((node) => node.id))
} else {
const currentBlockId = usePanelEditorStore.getState().currentBlockId
if (currentBlockId && blocks[currentBlockId]) {
event.preventDefault()
copyBlocks([currentBlockId])
}
}
} else if ((event.ctrlKey || event.metaKey) && event.key === 'v') {
if (effectivePermissions.canEdit && hasClipboard()) {
event.preventDefault()
// Calculate offset to paste blocks at viewport center
const pasteOffset = calculatePasteOffset(clipboard, screenToFlowPosition)
const pasteData = preparePasteData(pasteOffset)
if (pasteData) {
const pastedBlocks = Object.values(pasteData.blocks)
const hasTriggerInPaste = pastedBlocks.some((block) =>
TriggerUtils.isAnyTriggerType(block.type)
)
if (hasTriggerInPaste) {
const existingTrigger = Object.values(blocks).find((block) =>
TriggerUtils.isAnyTriggerType(block.type)
)
if (existingTrigger) {
addNotification({
level: 'error',
message:
'A workflow can only have one trigger block. Please remove the existing one before pasting.',
workflowId: activeWorkflowId || undefined,
})
return
}
}
collaborativeBatchAddBlocks(
pastedBlocks,
pasteData.edges,
pasteData.loops,
pasteData.parallels,
pasteData.subBlockValues
)
}
}
}
}
@@ -504,7 +657,22 @@ const WorkflowContent = React.memo(() => {
window.removeEventListener('keydown', handleKeyDown)
if (cleanup) cleanup()
}
}, [debouncedAutoLayout, undo, redo])
}, [
debouncedAutoLayout,
undo,
redo,
getNodes,
copyBlocks,
preparePasteData,
collaborativeBatchAddBlocks,
hasClipboard,
effectivePermissions.canEdit,
blocks,
addNotification,
activeWorkflowId,
clipboard,
screenToFlowPosition,
])
/**
* Removes all edges connected to a block, skipping individual edge recording for undo/redo.
@@ -617,14 +785,17 @@ const WorkflowContent = React.memo(() => {
/** Creates a standardized edge object for workflow connections. */
const createEdgeObject = useCallback(
(sourceId: string, targetId: string, sourceHandle: string): Edge => ({
id: crypto.randomUUID(),
source: sourceId,
target: targetId,
sourceHandle,
targetHandle: 'target',
type: 'workflowEdge',
}),
(sourceId: string, targetId: string, sourceHandle: string): Edge => {
const edge = {
id: crypto.randomUUID(),
source: sourceId,
target: targetId,
sourceHandle,
targetHandle: 'target',
type: 'workflowEdge',
}
return edge
},
[]
)
@@ -1568,6 +1739,21 @@ const WorkflowContent = React.memo(() => {
// Local state for nodes - allows smooth drag without store updates on every frame
const [displayNodes, setDisplayNodes] = useState<Node[]>([])
// Track whether Shift is held so drag gestures can switch between pan and select.
useEffect(() => {
const onKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Shift') setIsShiftPressed(true)
}
const onKeyUp = (e: KeyboardEvent) => {
if (e.key === 'Shift') setIsShiftPressed(false)
}
window.addEventListener('keydown', onKeyDown)
window.addEventListener('keyup', onKeyUp)
return () => {
window.removeEventListener('keydown', onKeyDown)
window.removeEventListener('keyup', onKeyUp)
}
}, [])
// Sync derived nodes to display nodes when structure changes
useEffect(() => {
setDisplayNodes(derivedNodes)
@@ -1673,21 +1859,12 @@ const WorkflowContent = React.memo(() => {
missingParentId: parentId,
})
// Fix the node by removing its parent reference and calculating absolute position
const absolutePosition = getNodeAbsolutePosition(id)
// Update the node to remove parent reference and use absolute position
collaborativeUpdateBlockPosition(id, absolutePosition)
updateBlockPosition(id, absolutePosition)
updateParentId(id, '', 'parent')
}
})
}, [
blocks,
collaborativeUpdateBlockPosition,
updateParentId,
getNodeAbsolutePosition,
isWorkflowReady,
])
}, [blocks, updateBlockPosition, updateParentId, getNodeAbsolutePosition, isWorkflowReady])
/** Handles edge removal changes. */
const onEdgesChange = useCallback(
@@ -2095,9 +2272,7 @@ const WorkflowContent = React.memo(() => {
}
}
// Emit collaborative position update for the final position
// This ensures other users see the smooth final position
collaborativeUpdateBlockPosition(node.id, finalPosition, true)
updateBlockPosition(node.id, finalPosition)
// Record single move entry on drag end to avoid micro-moves
const start = getDragStartPosition()
@@ -2218,7 +2393,7 @@ const WorkflowContent = React.memo(() => {
dragStartParentId,
potentialParentId,
updateNodeParent,
collaborativeUpdateBlockPosition,
updateBlockPosition,
addEdge,
tryCreateAutoConnectEdge,
blocks,
@@ -2232,7 +2407,57 @@ const WorkflowContent = React.memo(() => {
]
)
/** Clears edge selection and panel state when clicking empty canvas. */
// Latch selection-drag mode at drag start so releasing Shift mid-drag
// does not flip the gesture back to panning.
const onSelectionStart = useCallback(() => {
if (!isShiftPressed) return
setIsSelectionDragActive(true)
}, [isShiftPressed])
// Release the selection-drag latch, deferred one frame (same deferral as
// onSelectionDragStop uses).
const onSelectionEnd = useCallback(() => {
const release = () => setIsSelectionDragActive(false)
requestAnimationFrame(release)
}, [])
const onSelectionDragStop = useCallback(
(_event: React.MouseEvent, nodes: any[]) => {
requestAnimationFrame(() => setIsSelectionDragActive(false))
if (nodes.length === 0) return
const positionUpdates = nodes.map((node) => {
const currentBlock = blocks[node.id]
const currentParentId = currentBlock?.data?.parentId
let finalPosition = node.position
if (currentParentId) {
const parentNode = getNodes().find((n) => n.id === currentParentId)
if (parentNode) {
const containerDimensions = {
width: parentNode.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
height: parentNode.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
}
const blockDimensions = {
width: BLOCK_DIMENSIONS.FIXED_WIDTH,
height: Math.max(
currentBlock?.height || BLOCK_DIMENSIONS.MIN_HEIGHT,
BLOCK_DIMENSIONS.MIN_HEIGHT
),
}
finalPosition = clampPositionToContainer(
node.position,
containerDimensions,
blockDimensions
)
}
}
return { id: node.id, position: finalPosition }
})
collaborativeBatchUpdatePositions(positionUpdates)
},
[blocks, getNodes, collaborativeBatchUpdatePositions]
)
const onPaneClick = useCallback(() => {
setSelectedEdgeInfo(null)
usePanelEditorStore.getState().clearCurrentBlock()
@@ -2333,13 +2558,19 @@ const WorkflowContent = React.memo(() => {
}
event.preventDefault()
const primaryNode = selectedNodes[0]
removeBlock(primaryNode.id)
const selectedIds = selectedNodes.map((node) => node.id)
collaborativeBatchRemoveBlocks(selectedIds)
}
window.addEventListener('keydown', handleKeyDown)
return () => window.removeEventListener('keydown', handleKeyDown)
}, [selectedEdgeInfo, removeEdge, getNodes, removeBlock, effectivePermissions.canEdit])
}, [
selectedEdgeInfo,
removeEdge,
getNodes,
collaborativeBatchRemoveBlocks,
effectivePermissions.canEdit,
])
return (
<div className='flex h-full w-full flex-col overflow-hidden bg-[var(--bg)]'>
@@ -2390,15 +2621,18 @@ const WorkflowContent = React.memo(() => {
proOptions={reactFlowProOptions}
connectionLineStyle={connectionLineStyle}
connectionLineType={ConnectionLineType.SmoothStep}
onNodeClick={(e, _node) => {
e.stopPropagation()
}}
onPaneClick={onPaneClick}
onEdgeClick={onEdgeClick}
onPaneContextMenu={(e) => e.preventDefault()}
onNodeContextMenu={(e) => e.preventDefault()}
onPointerMove={handleCanvasPointerMove}
onPointerLeave={handleCanvasPointerLeave}
elementsSelectable={true}
selectNodesOnDrag={false}
selectionOnDrag={isShiftPressed || isSelectionDragActive}
panOnDrag={isShiftPressed || isSelectionDragActive ? false : [0, 1]}
onSelectionStart={onSelectionStart}
onSelectionEnd={onSelectionEnd}
multiSelectionKeyCode={['Meta', 'Control']}
nodesConnectable={effectivePermissions.canEdit}
nodesDraggable={effectivePermissions.canEdit}
draggable={false}
@@ -2408,6 +2642,7 @@ const WorkflowContent = React.memo(() => {
className={`workflow-container h-full bg-[var(--bg)] transition-opacity duration-150 ${reactFlowStyles} ${isCanvasReady ? 'opacity-100' : 'opacity-0'}`}
onNodeDrag={effectivePermissions.canEdit ? onNodeDrag : undefined}
onNodeDragStop={effectivePermissions.canEdit ? onNodeDragStop : undefined}
onSelectionDragStop={effectivePermissions.canEdit ? onSelectionDragStop : undefined}
onNodeDragStart={effectivePermissions.canEdit ? onNodeDragStart : undefined}
snapToGrid={snapToGrid}
snapGrid={snapGrid}

View File

@@ -2,8 +2,6 @@ import { useCallback, useEffect, useRef } from 'react'
import { createLogger } from '@sim/logger'
import type { Edge } from 'reactflow'
import { useSession } from '@/lib/auth/auth-client'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks'
@@ -16,9 +14,9 @@ import { useUndoRedoStore } from '@/stores/undo-redo'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import { mergeSubblockState, normalizeName } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState, Position } from '@/stores/workflows/workflow/types'
import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/workflow/types'
const logger = createLogger('CollaborativeWorkflow')
@@ -201,39 +199,6 @@ export function useCollaborativeWorkflow() {
try {
if (target === 'block') {
switch (operation) {
case 'add':
workflowStore.addBlock(
payload.id,
payload.type,
payload.name,
payload.position,
payload.data,
payload.parentId,
payload.extent,
{
enabled: payload.enabled,
horizontalHandles: payload.horizontalHandles,
advancedMode: payload.advancedMode,
triggerMode: payload.triggerMode ?? false,
height: payload.height,
}
)
if (payload.autoConnectEdge) {
workflowStore.addEdge(payload.autoConnectEdge)
}
// Apply subblock values if present in payload
if (payload.subBlocks && typeof payload.subBlocks === 'object') {
Object.entries(payload.subBlocks).forEach(([subblockId, subblock]) => {
if (WEBHOOK_SUBBLOCK_FIELDS.includes(subblockId)) {
return
}
const value = (subblock as any)?.value
if (value !== undefined && value !== null) {
subBlockStore.setValue(payload.id, subblockId, value)
}
})
}
break
case 'update-position': {
const blockId = payload.id
@@ -265,40 +230,6 @@ export function useCollaborativeWorkflow() {
case 'update-name':
workflowStore.updateBlockName(payload.id, payload.name)
break
case 'remove': {
const blockId = payload.id
const blocksToRemove = new Set<string>([blockId])
const findAllDescendants = (parentId: string) => {
Object.entries(workflowStore.blocks).forEach(([id, block]) => {
if (block.data?.parentId === parentId) {
blocksToRemove.add(id)
findAllDescendants(id)
}
})
}
findAllDescendants(blockId)
workflowStore.removeBlock(blockId)
lastPositionTimestamps.current.delete(blockId)
const updatedBlocks = useWorkflowStore.getState().blocks
const updatedEdges = useWorkflowStore.getState().edges
const graph = {
blocksById: updatedBlocks,
edgesById: Object.fromEntries(updatedEdges.map((e) => [e.id, e])),
}
const undoRedoStore = useUndoRedoStore.getState()
const stackKeys = Object.keys(undoRedoStore.stacks)
stackKeys.forEach((key) => {
const [workflowId, userId] = key.split(':')
if (workflowId === activeWorkflowId) {
undoRedoStore.pruneInvalidEntries(workflowId, userId, graph)
}
})
break
}
case 'toggle-enabled':
workflowStore.toggleBlockEnabled(payload.id)
break
@@ -318,40 +249,20 @@ export function useCollaborativeWorkflow() {
}
break
}
case 'duplicate':
workflowStore.addBlock(
payload.id,
payload.type,
payload.name,
payload.position,
payload.data,
payload.parentId,
payload.extent,
{
enabled: payload.enabled,
horizontalHandles: payload.horizontalHandles,
advancedMode: payload.advancedMode,
triggerMode: payload.triggerMode ?? false,
height: payload.height,
}
)
// Handle auto-connect edge if present
if (payload.autoConnectEdge) {
workflowStore.addEdge(payload.autoConnectEdge)
}
// Apply subblock values from duplicate payload so collaborators see content immediately
if (payload.subBlocks && typeof payload.subBlocks === 'object') {
Object.entries(payload.subBlocks).forEach(([subblockId, subblock]) => {
if (WEBHOOK_SUBBLOCK_FIELDS.includes(subblockId)) {
return
}
const value = (subblock as any)?.value
if (value !== undefined) {
subBlockStore.setValue(payload.id, subblockId, value)
}
} else if (target === 'blocks') {
switch (operation) {
case 'batch-update-positions': {
const { updates } = payload
if (Array.isArray(updates)) {
updates.forEach(({ id, position }: { id: string; position: Position }) => {
if (id && position) {
workflowStore.updateBlockPosition(id, position)
}
})
}
break
}
}
} else if (target === 'edge') {
switch (operation) {
@@ -439,9 +350,6 @@ export function useCollaborativeWorkflow() {
case 'remove':
variablesStore.deleteVariable(payload.variableId)
break
case 'duplicate':
variablesStore.duplicateVariable(payload.sourceVariableId, payload.id)
break
}
} else if (target === 'workflow') {
switch (operation) {
@@ -477,6 +385,93 @@ export function useCollaborativeWorkflow() {
break
}
}
if (target === 'blocks') {
switch (operation) {
case 'batch-add-blocks': {
const {
blocks,
edges,
loops,
parallels,
subBlockValues: addedSubBlockValues,
} = payload
logger.info('Received batch-add-blocks from remote user', {
userId,
blockCount: (blocks || []).length,
edgeCount: (edges || []).length,
})
;(blocks || []).forEach((block: BlockState) => {
workflowStore.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode ?? false,
height: block.height,
}
)
})
;(edges || []).forEach((edge: Edge) => {
workflowStore.addEdge(edge)
})
if (loops) {
Object.entries(loops as Record<string, Loop>).forEach(([loopId, loopConfig]) => {
useWorkflowStore.setState((state) => ({
loops: { ...state.loops, [loopId]: loopConfig },
}))
})
}
if (parallels) {
Object.entries(parallels as Record<string, Parallel>).forEach(
([parallelId, parallelConfig]) => {
useWorkflowStore.setState((state) => ({
parallels: { ...state.parallels, [parallelId]: parallelConfig },
}))
}
)
}
if (addedSubBlockValues && activeWorkflowId) {
Object.entries(
addedSubBlockValues as Record<string, Record<string, unknown>>
).forEach(([blockId, subBlocks]) => {
Object.entries(subBlocks).forEach(([subBlockId, value]) => {
subBlockStore.setValue(blockId, subBlockId, value)
})
})
}
logger.info('Successfully applied batch-add-blocks from remote user')
break
}
case 'batch-remove-blocks': {
const { ids } = payload
logger.info('Received batch-remove-blocks from remote user', {
userId,
count: (ids || []).length,
})
;(ids || []).forEach((id: string) => {
workflowStore.removeBlock(id)
})
logger.info('Successfully applied batch-remove-blocks from remote user')
break
}
}
}
} catch (error) {
logger.error('Error applying remote operation:', error)
} finally {
@@ -726,294 +721,33 @@ export function useCollaborativeWorkflow() {
]
)
const executeQueuedDebouncedOperation = useCallback(
(operation: string, target: string, payload: any, localAction: () => void) => {
if (isApplyingRemoteChange.current) return
if (isBaselineDiffView) {
logger.debug('Skipping debounced socket operation while viewing baseline diff:', operation)
return
}
const collaborativeBatchUpdatePositions = useCallback(
(updates: Array<{ id: string; position: Position }>) => {
if (!isInActiveRoom()) {
logger.debug('Skipping debounced operation - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
operation,
target,
})
logger.debug('Skipping batch position update - not in active workflow')
return
}
localAction()
if (updates.length === 0) return
emitWorkflowOperation(operation, target, payload)
},
[emitWorkflowOperation, isBaselineDiffView, isInActiveRoom, currentWorkflowId, activeWorkflowId]
)
const collaborativeAddBlock = useCallback(
(
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
autoConnectEdge?: Edge,
triggerMode?: boolean
) => {
// Skip socket operations when viewing baseline diff
if (isBaselineDiffView) {
logger.debug('Skipping collaborative add block while viewing baseline diff')
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping collaborative add block - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
})
return
}
const blockConfig = getBlock(type)
// Handle loop/parallel blocks that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel')) {
// For loop/parallel blocks, use empty subBlocks and outputs
const completeBlockData = {
id,
type,
name,
position,
data: data || {},
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
advancedMode: false,
triggerMode: triggerMode || false,
height: 0,
parentId,
extent,
autoConnectEdge, // Include edge data for atomic operation
}
// Skip if applying remote changes (don't auto-select blocks added by other users)
if (isApplyingRemoteChange.current) {
workflowStore.addBlock(id, type, name, position, data, parentId, extent, {
triggerMode: triggerMode || false,
})
if (autoConnectEdge) {
workflowStore.addEdge(autoConnectEdge)
}
return
}
// Generate operation ID for queue tracking
const operationId = crypto.randomUUID()
// Add to queue for retry mechanism
addToQueue({
id: operationId,
operation: {
operation: 'add',
target: 'block',
payload: completeBlockData,
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
// Apply locally first (immediate UI feedback)
workflowStore.addBlock(id, type, name, position, data, parentId, extent, {
triggerMode: triggerMode || false,
})
if (autoConnectEdge) {
workflowStore.addEdge(autoConnectEdge)
}
// Record for undo AFTER adding (pass the autoConnectEdge explicitly)
undoRedo.recordAddBlock(id, autoConnectEdge)
// Automatically select the newly added block (opens editor tab)
usePanelEditorStore.getState().setCurrentBlockId(id)
return
}
if (!blockConfig) {
logger.error(`Block type ${type} not found`)
return
}
// Generate subBlocks and outputs from the block configuration
const subBlocks: Record<string, any> = {}
if (blockConfig.subBlocks) {
blockConfig.subBlocks.forEach((subBlock) => {
let initialValue: unknown = null
if (typeof subBlock.value === 'function') {
try {
initialValue = subBlock.value({})
} catch (error) {
logger.warn('Failed to resolve dynamic sub-block default value', {
subBlockId: subBlock.id,
error: error instanceof Error ? error.message : String(error),
})
}
} else if (subBlock.defaultValue !== undefined) {
initialValue = subBlock.defaultValue
} else if (subBlock.type === 'input-format') {
initialValue = [
{
id: crypto.randomUUID(),
name: '',
type: 'string',
value: '',
collapsed: false,
},
]
} else if (subBlock.type === 'table') {
initialValue = []
}
subBlocks[subBlock.id] = {
id: subBlock.id,
type: subBlock.type,
value: initialValue,
}
})
}
// Get outputs based on trigger mode
const isTriggerMode = triggerMode || false
const outputs = getBlockOutputs(type, subBlocks, isTriggerMode)
const completeBlockData = {
id,
type,
name,
position,
data: data || {},
subBlocks,
outputs,
enabled: true,
horizontalHandles: true,
advancedMode: false,
triggerMode: isTriggerMode,
height: 0, // Default height, will be set by the UI
parentId,
extent,
autoConnectEdge, // Include edge data for atomic operation
}
// Skip if applying remote changes (don't auto-select blocks added by other users)
if (isApplyingRemoteChange.current) return
// Generate operation ID
const operationId = crypto.randomUUID()
// Add to queue
addToQueue({
id: operationId,
operation: {
operation: 'add',
target: 'block',
payload: completeBlockData,
operation: 'batch-update-positions',
target: 'blocks',
payload: { updates },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
// Apply locally
workflowStore.addBlock(id, type, name, position, data, parentId, extent, {
triggerMode: triggerMode || false,
})
if (autoConnectEdge) {
workflowStore.addEdge(autoConnectEdge)
}
// Record for undo AFTER adding (pass the autoConnectEdge explicitly)
undoRedo.recordAddBlock(id, autoConnectEdge)
// Automatically select the newly added block (opens editor tab)
usePanelEditorStore.getState().setCurrentBlockId(id)
},
[
workflowStore,
activeWorkflowId,
addToQueue,
session?.user?.id,
isBaselineDiffView,
isInActiveRoom,
currentWorkflowId,
undoRedo,
]
)
const collaborativeRemoveBlock = useCallback(
(id: string) => {
cancelOperationsForBlock(id)
// Get all blocks that will be removed (including nested blocks in subflows)
const blocksToRemove = new Set<string>([id])
const findAllDescendants = (parentId: string) => {
Object.entries(workflowStore.blocks).forEach(([blockId, block]) => {
if (block.data?.parentId === parentId) {
blocksToRemove.add(blockId)
findAllDescendants(blockId)
}
})
}
findAllDescendants(id)
// If the currently edited block is among the blocks being removed, clear selection to reset the panel
const currentEditedBlockId = usePanelEditorStore.getState().currentBlockId
if (currentEditedBlockId && blocksToRemove.has(currentEditedBlockId)) {
usePanelEditorStore.getState().clearCurrentBlock()
}
// Capture state before removal, including all nested blocks with subblock values
const allBlocks = mergeSubblockState(workflowStore.blocks, activeWorkflowId || undefined)
const capturedBlocks: Record<string, BlockState> = {}
blocksToRemove.forEach((blockId) => {
if (allBlocks[blockId]) {
capturedBlocks[blockId] = allBlocks[blockId]
}
})
// Capture all edges connected to any of the blocks being removed
const edges = workflowStore.edges.filter(
(edge) => blocksToRemove.has(edge.source) || blocksToRemove.has(edge.target)
)
if (Object.keys(capturedBlocks).length > 0) {
undoRedo.recordRemoveBlock(id, capturedBlocks[id], edges, capturedBlocks)
}
executeQueuedOperation('remove', 'block', { id }, () => workflowStore.removeBlock(id))
},
[executeQueuedOperation, workflowStore, cancelOperationsForBlock, undoRedo, activeWorkflowId]
)
const collaborativeUpdateBlockPosition = useCallback(
(id: string, position: Position, commit = true) => {
if (commit) {
executeQueuedOperation('update-position', 'block', { id, position, commit }, () => {
workflowStore.updateBlockPosition(id, position)
})
return
}
executeQueuedDebouncedOperation('update-position', 'block', { id, position }, () => {
updates.forEach(({ id, position }) => {
workflowStore.updateBlockPosition(id, position)
})
},
[executeQueuedDebouncedOperation, executeQueuedOperation, workflowStore]
[addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore]
)
const collaborativeUpdateBlockName = useCallback(
@@ -1333,148 +1067,6 @@ export function useCollaborativeWorkflow() {
]
)
const collaborativeDuplicateBlock = useCallback(
(sourceId: string) => {
if (!isInActiveRoom()) {
logger.debug('Skipping duplicate block - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
sourceId,
})
return
}
const sourceBlock = workflowStore.blocks[sourceId]
if (!sourceBlock) return
// Prevent duplication of start blocks (both legacy starter and unified start_trigger)
if (sourceBlock.type === 'starter' || sourceBlock.type === 'start_trigger') {
logger.warn('Cannot duplicate start block - only one start block allowed per workflow', {
blockId: sourceId,
blockType: sourceBlock.type,
})
return
}
// Generate new ID and calculate position
const newId = crypto.randomUUID()
const offsetPosition = {
x: sourceBlock.position.x + DEFAULT_DUPLICATE_OFFSET.x,
y: sourceBlock.position.y + DEFAULT_DUPLICATE_OFFSET.y,
}
const newName = getUniqueBlockName(sourceBlock.name, workflowStore.blocks)
// Get subblock values from the store, excluding webhook-specific fields
const allSubBlockValues =
subBlockStore.workflowValues[activeWorkflowId || '']?.[sourceId] || {}
const subBlockValues = Object.fromEntries(
Object.entries(allSubBlockValues).filter(([key]) => !WEBHOOK_SUBBLOCK_FIELDS.includes(key))
)
// Merge subblock structure with actual values
const mergedSubBlocks = sourceBlock.subBlocks
? JSON.parse(JSON.stringify(sourceBlock.subBlocks))
: {}
WEBHOOK_SUBBLOCK_FIELDS.forEach((field) => {
if (field in mergedSubBlocks) {
delete mergedSubBlocks[field]
}
})
Object.entries(subBlockValues).forEach(([subblockId, value]) => {
if (mergedSubBlocks[subblockId]) {
mergedSubBlocks[subblockId].value = value
} else {
// Create subblock if it doesn't exist in structure
mergedSubBlocks[subblockId] = {
id: subblockId,
type: 'unknown',
value: value,
}
}
})
// Create the complete block data for the socket operation
const duplicatedBlockData = {
sourceId,
id: newId,
type: sourceBlock.type,
name: newName,
position: offsetPosition,
data: sourceBlock.data ? JSON.parse(JSON.stringify(sourceBlock.data)) : {},
subBlocks: mergedSubBlocks,
outputs: sourceBlock.outputs ? JSON.parse(JSON.stringify(sourceBlock.outputs)) : {},
parentId: sourceBlock.data?.parentId || null,
extent: sourceBlock.data?.extent || null,
enabled: sourceBlock.enabled ?? true,
horizontalHandles: sourceBlock.horizontalHandles ?? true,
advancedMode: sourceBlock.advancedMode ?? false,
triggerMode: sourceBlock.triggerMode ?? false,
height: sourceBlock.height || 0,
}
workflowStore.addBlock(
newId,
sourceBlock.type,
newName,
offsetPosition,
sourceBlock.data ? JSON.parse(JSON.stringify(sourceBlock.data)) : {},
sourceBlock.data?.parentId,
sourceBlock.data?.extent,
{
enabled: sourceBlock.enabled,
horizontalHandles: sourceBlock.horizontalHandles,
advancedMode: sourceBlock.advancedMode,
triggerMode: sourceBlock.triggerMode ?? false,
height: sourceBlock.height,
}
)
// Focus the newly duplicated block in the editor
usePanelEditorStore.getState().setCurrentBlockId(newId)
executeQueuedOperation('duplicate', 'block', duplicatedBlockData, () => {
workflowStore.addBlock(
newId,
sourceBlock.type,
newName,
offsetPosition,
sourceBlock.data ? JSON.parse(JSON.stringify(sourceBlock.data)) : {},
sourceBlock.data?.parentId,
sourceBlock.data?.extent,
{
enabled: sourceBlock.enabled,
horizontalHandles: sourceBlock.horizontalHandles,
advancedMode: sourceBlock.advancedMode,
triggerMode: sourceBlock.triggerMode ?? false,
height: sourceBlock.height,
}
)
// Apply subblock values locally for immediate UI feedback
// The server will persist these values as part of the block creation
if (activeWorkflowId && Object.keys(subBlockValues).length > 0) {
Object.entries(subBlockValues).forEach(([subblockId, value]) => {
subBlockStore.setValue(newId, subblockId, value)
})
}
// Record for undo after the block is added
undoRedo.recordDuplicateBlock(sourceId, newId, duplicatedBlockData, undefined)
})
},
[
executeQueuedOperation,
workflowStore,
subBlockStore,
activeWorkflowId,
isInActiveRoom,
currentWorkflowId,
undoRedo,
]
)
const collaborativeUpdateLoopType = useCallback(
(loopId: string, loopType: 'for' | 'forEach' | 'while' | 'doWhile') => {
const currentBlock = workflowStore.blocks[loopId]
@@ -1714,23 +1306,196 @@ export function useCollaborativeWorkflow() {
[executeQueuedOperation, variablesStore, cancelOperationsForVariable]
)
const collaborativeDuplicateVariable = useCallback(
(variableId: string) => {
const newId = crypto.randomUUID()
const sourceVariable = useVariablesStore.getState().variables[variableId]
if (!sourceVariable) return null
const collaborativeBatchAddBlocks = useCallback(
(
blocks: BlockState[],
edges: Edge[] = [],
loops: Record<string, Loop> = {},
parallels: Record<string, Parallel> = {},
subBlockValues: Record<string, Record<string, unknown>> = {},
options?: { skipUndoRedo?: boolean }
) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch add blocks - not in active workflow')
return false
}
executeQueuedOperation(
'duplicate',
'variable',
{ sourceVariableId: variableId, id: newId },
() => {
variablesStore.duplicateVariable(variableId, newId)
}
)
return newId
if (isBaselineDiffView) {
logger.debug('Skipping batch add blocks while viewing baseline diff')
return false
}
if (blocks.length === 0) return false
logger.info('Batch adding blocks collaboratively', {
blockCount: blocks.length,
edgeCount: edges.length,
})
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: 'batch-add-blocks',
target: 'blocks',
payload: { blocks, edges, loops, parallels, subBlockValues },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
blocks.forEach((block) => {
workflowStore.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode ?? false,
height: block.height,
}
)
})
edges.forEach((edge) => {
workflowStore.addEdge(edge)
})
if (Object.keys(loops).length > 0) {
useWorkflowStore.setState((state) => ({
loops: { ...state.loops, ...loops },
}))
}
if (Object.keys(parallels).length > 0) {
useWorkflowStore.setState((state) => ({
parallels: { ...state.parallels, ...parallels },
}))
}
if (activeWorkflowId) {
Object.entries(subBlockValues).forEach(([blockId, subBlocks]) => {
Object.entries(subBlocks).forEach(([subBlockId, value]) => {
subBlockStore.setValue(blockId, subBlockId, value)
})
})
}
if (!options?.skipUndoRedo) {
undoRedo.recordBatchAddBlocks(blocks, edges, subBlockValues)
}
return true
},
[executeQueuedOperation, variablesStore]
[
addToQueue,
activeWorkflowId,
session?.user?.id,
isBaselineDiffView,
isInActiveRoom,
workflowStore,
subBlockStore,
undoRedo,
]
)
const collaborativeBatchRemoveBlocks = useCallback(
(blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
return false
}
if (blockIds.length === 0) return false
blockIds.forEach((id) => cancelOperationsForBlock(id))
const allBlocksToRemove = new Set<string>(blockIds)
const findAllDescendants = (parentId: string) => {
Object.entries(workflowStore.blocks).forEach(([blockId, block]) => {
if (block.data?.parentId === parentId) {
allBlocksToRemove.add(blockId)
findAllDescendants(blockId)
}
})
}
blockIds.forEach((id) => findAllDescendants(id))
const currentEditedBlockId = usePanelEditorStore.getState().currentBlockId
if (currentEditedBlockId && allBlocksToRemove.has(currentEditedBlockId)) {
usePanelEditorStore.getState().clearCurrentBlock()
}
const mergedBlocks = mergeSubblockState(workflowStore.blocks, activeWorkflowId || undefined)
const blockSnapshots: BlockState[] = []
const subBlockValues: Record<string, Record<string, unknown>> = {}
allBlocksToRemove.forEach((blockId) => {
const block = mergedBlocks[blockId]
if (block) {
blockSnapshots.push(block)
if (block.subBlocks) {
const values: Record<string, unknown> = {}
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
values[subBlockId] = subBlock.value
}
})
if (Object.keys(values).length > 0) {
subBlockValues[blockId] = values
}
}
}
})
const edgeSnapshots = workflowStore.edges.filter(
(e) => allBlocksToRemove.has(e.source) || allBlocksToRemove.has(e.target)
)
logger.info('Batch removing blocks collaboratively', {
requestedCount: blockIds.length,
totalCount: allBlocksToRemove.size,
})
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: 'batch-remove-blocks',
target: 'blocks',
payload: { ids: Array.from(allBlocksToRemove) },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
blockIds.forEach((id) => {
workflowStore.removeBlock(id)
})
if (!options?.skipUndoRedo && blockSnapshots.length > 0) {
undoRedo.recordBatchRemoveBlocks(blockSnapshots, edgeSnapshots, subBlockValues)
}
return true
},
[
addToQueue,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
workflowStore,
cancelOperationsForBlock,
undoRedo,
]
)
return {
@@ -1745,16 +1510,15 @@ export function useCollaborativeWorkflow() {
leaveWorkflow,
// Collaborative operations
collaborativeAddBlock,
collaborativeUpdateBlockPosition,
collaborativeBatchUpdatePositions,
collaborativeUpdateBlockName,
collaborativeRemoveBlock,
collaborativeToggleBlockEnabled,
collaborativeUpdateParentId,
collaborativeToggleBlockAdvancedMode,
collaborativeToggleBlockTriggerMode,
collaborativeToggleBlockHandles,
collaborativeDuplicateBlock,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeAddEdge,
collaborativeRemoveEdge,
collaborativeSetSubblockValue,
@@ -1764,7 +1528,6 @@ export function useCollaborativeWorkflow() {
collaborativeUpdateVariable,
collaborativeAddVariable,
collaborativeDeleteVariable,
collaborativeDuplicateVariable,
// Collaborative loop/parallel operations
collaborativeUpdateLoopType,

View File

@@ -5,11 +5,11 @@ import { useSession } from '@/lib/auth/auth-client'
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
import { useOperationQueue } from '@/stores/operation-queue/store'
import {
type BatchAddBlocksOperation,
type BatchRemoveBlocksOperation,
createOperationEntry,
type DuplicateBlockOperation,
type MoveBlockOperation,
type Operation,
type RemoveBlockOperation,
type RemoveEdgeOperation,
runWithUndoRedoRecordingSuspended,
type UpdateParentOperation,
@@ -17,7 +17,7 @@ import {
} from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, mergeSubblockState } from '@/stores/workflows/utils'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -32,89 +32,94 @@ export function useUndoRedo() {
const userId = session?.user?.id || 'unknown'
const recordAddBlock = useCallback(
(blockId: string, autoConnectEdge?: Edge) => {
if (!activeWorkflowId) return
const recordBatchAddBlocks = useCallback(
(
blockSnapshots: BlockState[],
edgeSnapshots: Edge[] = [],
subBlockValues: Record<string, Record<string, unknown>> = {}
) => {
if (!activeWorkflowId || blockSnapshots.length === 0) return
const operation: Operation = {
const operation: BatchAddBlocksOperation = {
id: crypto.randomUUID(),
type: 'add-block',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockId },
}
// Get fresh state from store
const currentBlocks = useWorkflowStore.getState().blocks
const merged = mergeSubblockState(currentBlocks, activeWorkflowId, blockId)
const blockSnapshot = merged[blockId] || currentBlocks[blockId]
const edgesToRemove = autoConnectEdge ? [autoConnectEdge] : []
const inverse: RemoveBlockOperation = {
id: crypto.randomUUID(),
type: 'remove-block',
type: 'batch-add-blocks',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
blockId,
blockSnapshot,
edgeSnapshots: edgesToRemove,
blockSnapshots,
edgeSnapshots,
subBlockValues,
},
}
const inverse: BatchRemoveBlocksOperation = {
id: crypto.randomUUID(),
type: 'batch-remove-blocks',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
blockSnapshots,
edgeSnapshots,
subBlockValues,
},
}
const entry = createOperationEntry(operation, inverse)
undoRedoStore.push(activeWorkflowId, userId, entry)
logger.debug('Recorded add block', {
blockId,
hasAutoConnect: !!autoConnectEdge,
edgeCount: edgesToRemove.length,
logger.debug('Recorded batch add blocks', {
blockCount: blockSnapshots.length,
edgeCount: edgeSnapshots.length,
workflowId: activeWorkflowId,
hasSnapshot: !!blockSnapshot,
})
},
[activeWorkflowId, userId, undoRedoStore]
)
const recordRemoveBlock = useCallback(
const recordBatchRemoveBlocks = useCallback(
(
blockId: string,
blockSnapshot: BlockState,
edgeSnapshots: Edge[],
allBlockSnapshots?: Record<string, BlockState>
blockSnapshots: BlockState[],
edgeSnapshots: Edge[] = [],
subBlockValues: Record<string, Record<string, unknown>> = {}
) => {
if (!activeWorkflowId) return
if (!activeWorkflowId || blockSnapshots.length === 0) return
const operation: RemoveBlockOperation = {
const operation: BatchRemoveBlocksOperation = {
id: crypto.randomUUID(),
type: 'remove-block',
type: 'batch-remove-blocks',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
blockId,
blockSnapshot,
blockSnapshots,
edgeSnapshots,
allBlockSnapshots,
subBlockValues,
},
}
const inverse: Operation = {
const inverse: BatchAddBlocksOperation = {
id: crypto.randomUUID(),
type: 'add-block',
type: 'batch-add-blocks',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockId },
data: {
blockSnapshots,
edgeSnapshots,
subBlockValues,
},
}
const entry = createOperationEntry(operation, inverse)
undoRedoStore.push(activeWorkflowId, userId, entry)
logger.debug('Recorded remove block', { blockId, workflowId: activeWorkflowId })
logger.debug('Recorded batch remove blocks', {
blockCount: blockSnapshots.length,
edgeCount: edgeSnapshots.length,
workflowId: activeWorkflowId,
})
},
[activeWorkflowId, userId, undoRedoStore]
)
@@ -227,51 +232,6 @@ export function useUndoRedo() {
[activeWorkflowId, userId, undoRedoStore]
)
const recordDuplicateBlock = useCallback(
(
sourceBlockId: string,
duplicatedBlockId: string,
duplicatedBlockSnapshot: BlockState,
autoConnectEdge?: Edge
) => {
if (!activeWorkflowId) return
const operation: DuplicateBlockOperation = {
id: crypto.randomUUID(),
type: 'duplicate-block',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
sourceBlockId,
duplicatedBlockId,
duplicatedBlockSnapshot,
autoConnectEdge,
},
}
// Inverse is to remove the duplicated block
const inverse: RemoveBlockOperation = {
id: crypto.randomUUID(),
type: 'remove-block',
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: {
blockId: duplicatedBlockId,
blockSnapshot: duplicatedBlockSnapshot,
edgeSnapshots: autoConnectEdge ? [autoConnectEdge] : [],
},
}
const entry = createOperationEntry(operation, inverse)
undoRedoStore.push(activeWorkflowId, userId, entry)
logger.debug('Recorded duplicate block', { sourceBlockId, duplicatedBlockId })
},
[activeWorkflowId, userId, undoRedoStore]
)
const recordUpdateParent = useCallback(
(
blockId: string,
@@ -347,204 +307,117 @@ export function useUndoRedo() {
const opId = crypto.randomUUID()
switch (entry.inverse.type) {
case 'remove-block': {
const removeInverse = entry.inverse as RemoveBlockOperation
const blockId = removeInverse.data.blockId
case 'batch-remove-blocks': {
const batchRemoveOp = entry.inverse as BatchRemoveBlocksOperation
const { blockSnapshots } = batchRemoveOp.data
const blockIds = blockSnapshots.map((b) => b.id)
if (workflowStore.blocks[blockId]) {
// Refresh inverse snapshot to capture the latest subblock values and edges at undo time
const mergedNow = mergeSubblockState(workflowStore.blocks, activeWorkflowId, blockId)
const latestBlockSnapshot = mergedNow[blockId] || workflowStore.blocks[blockId]
const latestEdgeSnapshots = workflowStore.edges.filter(
(e) => e.source === blockId || e.target === blockId
)
removeInverse.data.blockSnapshot = latestBlockSnapshot
removeInverse.data.edgeSnapshots = latestEdgeSnapshots
// First remove the edges that were added with the block (autoConnect edge)
const edgesToRemove = removeInverse.data.edgeSnapshots || []
edgesToRemove.forEach((edge) => {
if (workflowStore.edges.find((e) => e.id === edge.id)) {
workflowStore.removeEdge(edge.id)
// Send edge removal to server
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'remove',
target: 'edge',
payload: { id: edge.id },
},
workflowId: activeWorkflowId,
userId,
})
}
})
// Then remove the block
addToQueue({
id: opId,
operation: {
operation: 'remove',
target: 'block',
payload: { id: blockId, isUndo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.removeBlock(blockId)
} else {
logger.debug('Undo remove-block skipped; block missing', {
blockId,
})
}
break
}
case 'add-block': {
const originalOp = entry.operation as RemoveBlockOperation
const { blockSnapshot, edgeSnapshots, allBlockSnapshots } = originalOp.data
if (!blockSnapshot || workflowStore.blocks[blockSnapshot.id]) {
logger.debug('Undo add-block skipped', {
hasSnapshot: Boolean(blockSnapshot),
exists: Boolean(blockSnapshot && workflowStore.blocks[blockSnapshot.id]),
})
const existingBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
if (existingBlockIds.length === 0) {
logger.debug('Undo batch-remove-blocks skipped; no blocks exist')
break
}
// Preserve the original name from the snapshot on undo
const restoredName = blockSnapshot.name
const latestEdges = workflowStore.edges.filter(
(e) => existingBlockIds.includes(e.source) || existingBlockIds.includes(e.target)
)
batchRemoveOp.data.edgeSnapshots = latestEdges
const latestSubBlockValues: Record<string, Record<string, unknown>> = {}
existingBlockIds.forEach((blockId) => {
const merged = mergeSubblockState(workflowStore.blocks, activeWorkflowId, blockId)
const block = merged[blockId]
if (block?.subBlocks) {
const values: Record<string, unknown> = {}
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
values[subBlockId] = subBlock.value
}
})
if (Object.keys(values).length > 0) {
latestSubBlockValues[blockId] = values
}
}
})
batchRemoveOp.data.subBlockValues = latestSubBlockValues
// FIRST: Add the main block (parent subflow) with subBlocks in payload
addToQueue({
id: opId,
operation: {
operation: 'add',
target: 'block',
operation: 'batch-remove-blocks',
target: 'blocks',
payload: { ids: existingBlockIds },
},
workflowId: activeWorkflowId,
userId,
})
existingBlockIds.forEach((id) => workflowStore.removeBlock(id))
break
}
case 'batch-add-blocks': {
const batchAddOp = entry.operation as BatchAddBlocksOperation
const { blockSnapshots, edgeSnapshots, subBlockValues } = batchAddOp.data
const blocksToAdd = blockSnapshots.filter((b) => !workflowStore.blocks[b.id])
if (blocksToAdd.length === 0) {
logger.debug('Undo batch-add-blocks skipped; all blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
...blockSnapshot,
name: restoredName,
subBlocks: blockSnapshot.subBlocks || {},
autoConnectEdge: undefined,
isUndo: true,
originalOpId: entry.id,
blocks: blocksToAdd,
edges: edgeSnapshots || [],
loops: {},
parallels: {},
subBlockValues: subBlockValues || {},
},
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.addBlock(
blockSnapshot.id,
blockSnapshot.type,
restoredName,
blockSnapshot.position,
blockSnapshot.data,
blockSnapshot.data?.parentId,
blockSnapshot.data?.extent,
{
enabled: blockSnapshot.enabled,
horizontalHandles: blockSnapshot.horizontalHandles,
advancedMode: blockSnapshot.advancedMode,
triggerMode: blockSnapshot.triggerMode,
height: blockSnapshot.height,
}
)
// Set subblock values for the main block locally
if (blockSnapshot.subBlocks && activeWorkflowId) {
const subblockValues: Record<string, any> = {}
Object.entries(blockSnapshot.subBlocks).forEach(
([subBlockId, subBlock]: [string, any]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
subblockValues[subBlockId] = subBlock.value
}
blocksToAdd.forEach((block) => {
workflowStore.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (Object.keys(subblockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
[blockSnapshot.id]: subblockValues,
},
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
}))
}
},
}))
}
// SECOND: If this is a subflow with nested blocks, restore them AFTER the parent exists
if (allBlockSnapshots) {
Object.entries(allBlockSnapshots).forEach(([id, snap]: [string, any]) => {
if (id !== blockSnapshot.id && !workflowStore.blocks[id]) {
// Preserve original nested block name from snapshot on undo
const restoredNestedName = snap.name
// Add nested block locally
workflowStore.addBlock(
snap.id,
snap.type,
restoredNestedName,
snap.position,
snap.data,
snap.data?.parentId,
snap.data?.extent,
{
enabled: snap.enabled,
horizontalHandles: snap.horizontalHandles,
advancedMode: snap.advancedMode,
triggerMode: snap.triggerMode,
height: snap.height,
}
)
// Send to server with subBlocks included in payload
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'add',
target: 'block',
payload: {
...snap,
name: restoredNestedName,
subBlocks: snap.subBlocks || {},
autoConnectEdge: undefined,
isUndo: true,
originalOpId: entry.id,
},
},
workflowId: activeWorkflowId,
userId,
})
// Restore subblock values for nested blocks locally
if (snap.subBlocks && activeWorkflowId) {
const subBlockStore = useSubBlockStore.getState()
Object.entries(snap.subBlocks).forEach(
([subBlockId, subBlock]: [string, any]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
subBlockStore.setValue(snap.id, subBlockId, subBlock.value)
}
}
)
}
}
})
}
// THIRD: Finally restore edges after all blocks exist
if (edgeSnapshots && edgeSnapshots.length > 0) {
edgeSnapshots.forEach((edge) => {
workflowStore.addEdge(edge)
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'add',
target: 'edge',
payload: edge,
},
workflowId: activeWorkflowId,
userId,
})
if (!workflowStore.edges.find((e) => e.id === edge.id)) {
workflowStore.addEdge(edge)
}
})
}
break
@@ -639,49 +512,6 @@ export function useUndoRedo() {
}
break
}
case 'duplicate-block': {
// Undo duplicate means removing the duplicated block
const dupOp = entry.operation as DuplicateBlockOperation
const duplicatedId = dupOp.data.duplicatedBlockId
if (workflowStore.blocks[duplicatedId]) {
// Remove any edges connected to the duplicated block
const edges = workflowStore.edges.filter(
(edge) => edge.source === duplicatedId || edge.target === duplicatedId
)
edges.forEach((edge) => {
workflowStore.removeEdge(edge.id)
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'remove',
target: 'edge',
payload: { id: edge.id },
},
workflowId: activeWorkflowId,
userId,
})
})
// Remove the duplicated block
addToQueue({
id: opId,
operation: {
operation: 'remove',
target: 'block',
payload: { id: duplicatedId, isUndo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.removeBlock(duplicatedId)
} else {
logger.debug('Undo duplicate-block skipped; duplicated block missing', {
duplicatedId,
})
}
break
}
case 'update-parent': {
// Undo parent update means reverting to the old parent and position
const updateOp = entry.inverse as UpdateParentOperation
@@ -963,189 +793,96 @@ export function useUndoRedo() {
const opId = crypto.randomUUID()
switch (entry.operation.type) {
case 'add-block': {
// Redo should re-apply the original add: add the block first, then edges
const inv = entry.inverse as RemoveBlockOperation
const snap = inv.data.blockSnapshot
const edgeSnapshots = inv.data.edgeSnapshots || []
const allBlockSnapshots = inv.data.allBlockSnapshots
case 'batch-add-blocks': {
const batchOp = entry.operation as BatchAddBlocksOperation
const { blockSnapshots, edgeSnapshots, subBlockValues } = batchOp.data
if (!snap || workflowStore.blocks[snap.id]) {
const blocksToAdd = blockSnapshots.filter((b) => !workflowStore.blocks[b.id])
if (blocksToAdd.length === 0) {
logger.debug('Redo batch-add-blocks skipped; all blocks exist')
break
}
// Preserve the original name from the snapshot on redo
const restoredName = snap.name
// FIRST: Add the main block (parent subflow) with subBlocks included
addToQueue({
id: opId,
operation: {
operation: 'add',
target: 'block',
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
...snap,
name: restoredName,
subBlocks: snap.subBlocks || {},
isRedo: true,
originalOpId: entry.id,
blocks: blocksToAdd,
edges: edgeSnapshots || [],
loops: {},
parallels: {},
subBlockValues: subBlockValues || {},
},
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.addBlock(
snap.id,
snap.type,
restoredName,
snap.position,
snap.data,
snap.data?.parentId,
snap.data?.extent,
{
enabled: snap.enabled,
horizontalHandles: snap.horizontalHandles,
advancedMode: snap.advancedMode,
triggerMode: snap.triggerMode,
height: snap.height,
}
)
// Set subblock values for the main block locally
if (snap.subBlocks && activeWorkflowId) {
const subblockValues: Record<string, any> = {}
Object.entries(snap.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
subblockValues[subBlockId] = subBlock.value
blocksToAdd.forEach((block) => {
workflowStore.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
})
if (Object.keys(subblockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
[snap.id]: subblockValues,
},
},
}))
}
}
// SECOND: If this is a subflow with nested blocks, restore them AFTER the parent exists
if (allBlockSnapshots) {
Object.entries(allBlockSnapshots).forEach(([id, snapNested]: [string, any]) => {
if (id !== snap.id && !workflowStore.blocks[id]) {
// Preserve original nested block name from snapshot on redo
const restoredNestedName = snapNested.name
// Add nested block locally
workflowStore.addBlock(
snapNested.id,
snapNested.type,
restoredNestedName,
snapNested.position,
snapNested.data,
snapNested.data?.parentId,
snapNested.data?.extent,
{
enabled: snapNested.enabled,
horizontalHandles: snapNested.horizontalHandles,
advancedMode: snapNested.advancedMode,
triggerMode: snapNested.triggerMode,
height: snapNested.height,
}
)
// Send to server with subBlocks included
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'add',
target: 'block',
payload: {
...snapNested,
name: restoredNestedName,
subBlocks: snapNested.subBlocks || {},
autoConnectEdge: undefined,
isRedo: true,
originalOpId: entry.id,
},
},
workflowId: activeWorkflowId,
userId,
})
// Restore subblock values for nested blocks locally
if (snapNested.subBlocks && activeWorkflowId) {
const subBlockStore = useSubBlockStore.getState()
Object.entries(snapNested.subBlocks).forEach(
([subBlockId, subBlock]: [string, any]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
subBlockStore.setValue(snapNested.id, subBlockId, subBlock.value)
}
}
)
}
}
})
}
// THIRD: Finally restore edges after all blocks exist
edgeSnapshots.forEach((edge) => {
if (!workflowStore.edges.find((e) => e.id === edge.id)) {
workflowStore.addEdge(edge)
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'add',
target: 'edge',
payload: { ...edge, isRedo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
edgeSnapshots.forEach((edge) => {
if (!workflowStore.edges.find((e) => e.id === edge.id)) {
workflowStore.addEdge(edge)
}
})
}
break
}
case 'remove-block': {
// Redo should re-apply the original remove: remove edges first, then block
const blockId = entry.operation.data.blockId
const edgesToRemove = (entry.operation as RemoveBlockOperation).data.edgeSnapshots || []
edgesToRemove.forEach((edge) => {
if (workflowStore.edges.find((e) => e.id === edge.id)) {
workflowStore.removeEdge(edge.id)
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'remove',
target: 'edge',
payload: { id: edge.id, isRedo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
}
case 'batch-remove-blocks': {
const batchOp = entry.operation as BatchRemoveBlocksOperation
const { blockSnapshots } = batchOp.data
const blockIds = blockSnapshots.map((b) => b.id)
const existingBlockIds = blockIds.filter((id) => workflowStore.blocks[id])
if (existingBlockIds.length === 0) {
logger.debug('Redo batch-remove-blocks skipped; no blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: 'batch-remove-blocks',
target: 'blocks',
payload: { ids: existingBlockIds },
},
workflowId: activeWorkflowId,
userId,
})
if (workflowStore.blocks[blockId]) {
addToQueue({
id: opId,
operation: {
operation: 'remove',
target: 'block',
payload: { id: blockId, isRedo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.removeBlock(blockId)
} else {
logger.debug('Redo remove-block skipped; block missing', { blockId })
}
existingBlockIds.forEach((id) => workflowStore.removeBlock(id))
break
}
case 'add-edge': {
@@ -1230,100 +967,6 @@ export function useUndoRedo() {
}
break
}
case 'duplicate-block': {
// Redo duplicate means re-adding the duplicated block
const dupOp = entry.operation as DuplicateBlockOperation
const { duplicatedBlockSnapshot, autoConnectEdge } = dupOp.data
if (!duplicatedBlockSnapshot || workflowStore.blocks[duplicatedBlockSnapshot.id]) {
logger.debug('Redo duplicate-block skipped', {
hasSnapshot: Boolean(duplicatedBlockSnapshot),
exists: Boolean(
duplicatedBlockSnapshot && workflowStore.blocks[duplicatedBlockSnapshot.id]
),
})
break
}
const currentBlocks = useWorkflowStore.getState().blocks
const uniqueName = getUniqueBlockName(duplicatedBlockSnapshot.name, currentBlocks)
// Add the duplicated block
addToQueue({
id: opId,
operation: {
operation: 'duplicate',
target: 'block',
payload: {
...duplicatedBlockSnapshot,
name: uniqueName,
subBlocks: duplicatedBlockSnapshot.subBlocks || {},
autoConnectEdge,
isRedo: true,
originalOpId: entry.id,
},
},
workflowId: activeWorkflowId,
userId,
})
workflowStore.addBlock(
duplicatedBlockSnapshot.id,
duplicatedBlockSnapshot.type,
uniqueName,
duplicatedBlockSnapshot.position,
duplicatedBlockSnapshot.data,
duplicatedBlockSnapshot.data?.parentId,
duplicatedBlockSnapshot.data?.extent,
{
enabled: duplicatedBlockSnapshot.enabled,
horizontalHandles: duplicatedBlockSnapshot.horizontalHandles,
advancedMode: duplicatedBlockSnapshot.advancedMode,
triggerMode: duplicatedBlockSnapshot.triggerMode,
height: duplicatedBlockSnapshot.height,
}
)
// Restore subblock values
if (duplicatedBlockSnapshot.subBlocks && activeWorkflowId) {
const subblockValues: Record<string, any> = {}
Object.entries(duplicatedBlockSnapshot.subBlocks).forEach(
([subBlockId, subBlock]: [string, any]) => {
if (subBlock.value !== null && subBlock.value !== undefined) {
subblockValues[subBlockId] = subBlock.value
}
}
)
if (Object.keys(subblockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
[duplicatedBlockSnapshot.id]: subblockValues,
},
},
}))
}
}
// Add auto-connect edge if present
if (autoConnectEdge && !workflowStore.edges.find((e) => e.id === autoConnectEdge.id)) {
workflowStore.addEdge(autoConnectEdge)
addToQueue({
id: crypto.randomUUID(),
operation: {
operation: 'add',
target: 'edge',
payload: { ...autoConnectEdge, isRedo: true, originalOpId: entry.id },
},
workflowId: activeWorkflowId,
userId,
})
}
break
}
case 'update-parent': {
// Redo parent update means applying the new parent and position
const updateOp = entry.operation as UpdateParentOperation
@@ -1726,12 +1369,11 @@ export function useUndoRedo() {
)
return {
recordAddBlock,
recordRemoveBlock,
recordBatchAddBlocks,
recordBatchRemoveBlocks,
recordAddEdge,
recordRemoveEdge,
recordMove,
recordDuplicateBlock,
recordUpdateParent,
recordApplyDiff,
recordAcceptDiff,

View File

@@ -173,6 +173,9 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
case 'block':
await handleBlockOperationTx(tx, workflowId, op, payload)
break
case 'blocks':
await handleBlocksOperationTx(tx, workflowId, op, payload)
break
case 'edge':
await handleEdgeOperationTx(tx, workflowId, op, payload)
break
@@ -216,107 +219,6 @@ async function handleBlockOperationTx(
payload: any
) {
switch (operation) {
case 'add': {
// Validate required fields for add operation
if (!payload.id || !payload.type || !payload.name || !payload.position) {
throw new Error('Missing required fields for add block operation')
}
logger.debug(`Adding block: ${payload.type} (${payload.id})`, {
isSubflowType: isSubflowBlockType(payload.type),
})
// Extract parentId and extent from payload.data if they exist there, otherwise from payload directly
const parentId = payload.parentId || payload.data?.parentId || null
const extent = payload.extent || payload.data?.extent || null
logger.debug(`Block parent info:`, {
blockId: payload.id,
hasParent: !!parentId,
parentId,
extent,
payloadParentId: payload.parentId,
dataParentId: payload.data?.parentId,
})
try {
const insertData = {
id: payload.id,
workflowId,
type: payload.type,
name: payload.name,
positionX: payload.position.x,
positionY: payload.position.y,
data: {
...(payload.data || {}),
...(parentId ? { parentId } : {}),
...(extent ? { extent } : {}),
},
subBlocks: payload.subBlocks || {},
outputs: payload.outputs || {},
enabled: payload.enabled ?? true,
horizontalHandles: payload.horizontalHandles ?? true,
advancedMode: payload.advancedMode ?? false,
triggerMode: payload.triggerMode ?? false,
height: payload.height || 0,
}
await tx.insert(workflowBlocks).values(insertData)
await insertAutoConnectEdge(tx, workflowId, payload.autoConnectEdge, logger)
} catch (insertError) {
logger.error(`❌ Failed to insert block ${payload.id}:`, insertError)
throw insertError
}
// Auto-create subflow entry for loop/parallel blocks
if (isSubflowBlockType(payload.type)) {
try {
const subflowConfig =
payload.type === SubflowType.LOOP
? {
id: payload.id,
nodes: [], // Empty initially, will be populated when child blocks are added
iterations: payload.data?.count || DEFAULT_LOOP_ITERATIONS,
loopType: payload.data?.loopType || 'for',
// Set the appropriate field based on loop type
...(payload.data?.loopType === 'while'
? { whileCondition: payload.data?.whileCondition || '' }
: payload.data?.loopType === 'doWhile'
? { doWhileCondition: payload.data?.doWhileCondition || '' }
: { forEachItems: payload.data?.collection || '' }),
}
: {
id: payload.id,
nodes: [], // Empty initially, will be populated when child blocks are added
distribution: payload.data?.collection || '',
count: payload.data?.count || DEFAULT_PARALLEL_COUNT,
parallelType: payload.data?.parallelType || 'count',
}
logger.debug(`Auto-creating ${payload.type} subflow ${payload.id}:`, subflowConfig)
await tx.insert(workflowSubflows).values({
id: payload.id,
workflowId,
type: payload.type,
config: subflowConfig,
})
} catch (subflowError) {
logger.error(`❌ Failed to create ${payload.type} subflow ${payload.id}:`, subflowError)
throw subflowError
}
}
// If this block has a parent, update the parent's subflow node list
if (parentId) {
await updateSubflowNodeList(tx, workflowId, parentId)
}
logger.debug(`Added block ${payload.id} (${payload.type}) to workflow ${workflowId}`)
break
}
case 'update-position': {
if (!payload.id || !payload.position) {
throw new Error('Missing required fields for update position operation')
@@ -342,156 +244,6 @@ async function handleBlockOperationTx(
break
}
case 'remove': {
if (!payload.id) {
throw new Error('Missing block ID for remove operation')
}
// Collect all block IDs that will be deleted (including child blocks)
const blocksToDelete = new Set<string>([payload.id])
// Check if this is a subflow block that needs cascade deletion
const blockToRemove = await tx
.select({
type: workflowBlocks.type,
parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'`,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
if (blockToRemove.length > 0 && isSubflowBlockType(blockToRemove[0].type)) {
// Cascade delete: Remove all child blocks first
const childBlocks = await tx
.select({ id: workflowBlocks.id, type: workflowBlocks.type })
.from(workflowBlocks)
.where(
and(
eq(workflowBlocks.workflowId, workflowId),
sql`${workflowBlocks.data}->>'parentId' = ${payload.id}`
)
)
logger.debug(
`Starting cascade deletion for subflow block ${payload.id} (type: ${blockToRemove[0].type})`
)
logger.debug(
`Found ${childBlocks.length} child blocks to delete: [${childBlocks.map((b: any) => `${b.id} (${b.type})`).join(', ')}]`
)
// Add child blocks to deletion set
childBlocks.forEach((child: { id: string; type: string }) => blocksToDelete.add(child.id))
// Remove edges connected to child blocks
for (const childBlock of childBlocks) {
await tx
.delete(workflowEdges)
.where(
and(
eq(workflowEdges.workflowId, workflowId),
or(
eq(workflowEdges.sourceBlockId, childBlock.id),
eq(workflowEdges.targetBlockId, childBlock.id)
)
)
)
}
// Remove child blocks from database
await tx
.delete(workflowBlocks)
.where(
and(
eq(workflowBlocks.workflowId, workflowId),
sql`${workflowBlocks.data}->>'parentId' = ${payload.id}`
)
)
// Remove the subflow entry
await tx
.delete(workflowSubflows)
.where(
and(eq(workflowSubflows.id, payload.id), eq(workflowSubflows.workflowId, workflowId))
)
}
// Clean up external webhooks before deleting blocks
try {
const blockIdsArray = Array.from(blocksToDelete)
const webhooksToCleanup = await tx
.select({
webhook: webhook,
workflow: {
id: workflow.id,
userId: workflow.userId,
workspaceId: workflow.workspaceId,
},
})
.from(webhook)
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
.where(and(eq(webhook.workflowId, workflowId), inArray(webhook.blockId, blockIdsArray)))
if (webhooksToCleanup.length > 0) {
logger.debug(
`Found ${webhooksToCleanup.length} webhook(s) to cleanup for blocks: ${blockIdsArray.join(', ')}`
)
const requestId = `socket-${workflowId}-${Date.now()}-${Math.random().toString(36).substring(7)}`
// Clean up each webhook (don't fail if cleanup fails)
for (const webhookData of webhooksToCleanup) {
try {
await cleanupExternalWebhook(webhookData.webhook, webhookData.workflow, requestId)
} catch (cleanupError) {
logger.error(`Failed to cleanup external webhook during block deletion`, {
webhookId: webhookData.webhook.id,
workflowId: webhookData.workflow.id,
userId: webhookData.workflow.userId,
workspaceId: webhookData.workflow.workspaceId,
provider: webhookData.webhook.provider,
blockId: webhookData.webhook.blockId,
error: cleanupError,
})
// Continue with deletion even if cleanup fails
}
}
}
} catch (webhookCleanupError) {
logger.error(`Error during webhook cleanup for block deletion (continuing with deletion)`, {
workflowId,
blockIds: Array.from(blocksToDelete),
error: webhookCleanupError,
})
// Continue with block deletion even if webhook cleanup fails
}
// Remove any edges connected to this block
await tx
.delete(workflowEdges)
.where(
and(
eq(workflowEdges.workflowId, workflowId),
or(
eq(workflowEdges.sourceBlockId, payload.id),
eq(workflowEdges.targetBlockId, payload.id)
)
)
)
// Finally remove the block itself
await tx
.delete(workflowBlocks)
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
// If this block had a parent, update the parent's subflow node list
if (blockToRemove.length > 0 && blockToRemove[0].parentId) {
await updateSubflowNodeList(tx, workflowId, blockToRemove[0].parentId)
}
logger.debug(`Removed block ${payload.id} and its connections from workflow ${workflowId}`)
break
}
case 'update-name': {
if (!payload.id || !payload.name) {
throw new Error('Missing required fields for update name operation')
@@ -677,114 +429,272 @@ async function handleBlockOperationTx(
break
}
case 'duplicate': {
// Validate required fields for duplicate operation
if (!payload.sourceId || !payload.id || !payload.type || !payload.name || !payload.position) {
throw new Error('Missing required fields for duplicate block operation')
}
logger.debug(`Duplicating block: ${payload.type} (${payload.sourceId} -> ${payload.id})`, {
isSubflowType: isSubflowBlockType(payload.type),
payload,
})
// Extract parentId and extent from payload
const parentId = payload.parentId || null
const extent = payload.extent || null
try {
const insertData = {
id: payload.id,
workflowId,
type: payload.type,
name: payload.name,
positionX: payload.position.x,
positionY: payload.position.y,
data: {
...(payload.data || {}),
...(parentId ? { parentId } : {}),
...(extent ? { extent } : {}),
},
subBlocks: payload.subBlocks || {},
outputs: payload.outputs || {},
enabled: payload.enabled ?? true,
horizontalHandles: payload.horizontalHandles ?? true,
advancedMode: payload.advancedMode ?? false,
triggerMode: payload.triggerMode ?? false,
height: payload.height || 0,
}
await tx.insert(workflowBlocks).values(insertData)
// Handle auto-connect edge if present
await insertAutoConnectEdge(tx, workflowId, payload.autoConnectEdge, logger)
} catch (insertError) {
logger.error(`❌ Failed to insert duplicated block ${payload.id}:`, insertError)
throw insertError
}
// Auto-create subflow entry for loop/parallel blocks
if (isSubflowBlockType(payload.type)) {
try {
const subflowConfig =
payload.type === SubflowType.LOOP
? {
id: payload.id,
nodes: [], // Empty initially, will be populated when child blocks are added
iterations: payload.data?.count || DEFAULT_LOOP_ITERATIONS,
loopType: payload.data?.loopType || 'for',
// Set the appropriate field based on loop type
...(payload.data?.loopType === 'while'
? { whileCondition: payload.data?.whileCondition || '' }
: payload.data?.loopType === 'doWhile'
? { doWhileCondition: payload.data?.doWhileCondition || '' }
: { forEachItems: payload.data?.collection || '' }),
}
: {
id: payload.id,
nodes: [], // Empty initially, will be populated when child blocks are added
distribution: payload.data?.collection || '',
}
logger.debug(
`Auto-creating ${payload.type} subflow for duplicated block ${payload.id}:`,
subflowConfig
)
await tx.insert(workflowSubflows).values({
id: payload.id,
workflowId,
type: payload.type,
config: subflowConfig,
})
} catch (subflowError) {
logger.error(
`❌ Failed to create ${payload.type} subflow for duplicated block ${payload.id}:`,
subflowError
)
throw subflowError
}
}
// If this block has a parent, update the parent's subflow node list
if (parentId) {
await updateSubflowNodeList(tx, workflowId, parentId)
}
logger.debug(
`Duplicated block ${payload.sourceId} -> ${payload.id} (${payload.type}) in workflow ${workflowId}`
)
break
}
// Add other block operations as needed
default:
logger.warn(`Unknown block operation: ${operation}`)
throw new Error(`Unsupported block operation: ${operation}`)
}
}
// Edge operations
/**
 * Applies a batched block operation ('blocks' target) inside an existing DB
 * transaction. Supports bulk position updates, bulk block/edge/subflow
 * insertion (paste, cross-workflow move), and cascading bulk removal.
 *
 * @param tx         Active transaction handle (drizzle-style query builder).
 * @param workflowId Workflow whose rows are being modified; every statement is
 *                   scoped to this id.
 * @param operation  One of 'batch-update-positions' | 'batch-add-blocks' |
 *                   'batch-remove-blocks'; anything else throws.
 * @param payload    Operation-specific body — shape is assumed from the socket
 *                   callers and is not validated here beyond basic presence
 *                   checks. TODO confirm against WorkflowOperationSchema.
 * @throws Error for unsupported operations; DB errors propagate to the caller
 *         so the surrounding transaction can roll back.
 */
async function handleBlocksOperationTx(
  tx: any,
  workflowId: string,
  operation: string,
  payload: any
) {
  switch (operation) {
    case 'batch-update-positions': {
      const { updates } = payload
      // Nothing to do for an empty/malformed batch; treated as a no-op rather
      // than an error (position updates are low-stakes and high-frequency).
      if (!Array.isArray(updates) || updates.length === 0) {
        return
      }
      // One UPDATE per block, sequentially within the transaction. Entries
      // missing an id or position are silently skipped.
      for (const update of updates) {
        const { id, position } = update
        if (!id || !position) continue
        await tx
          .update(workflowBlocks)
          .set({
            positionX: position.x,
            positionY: position.y,
          })
          .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
      }
      break
    }
    case 'batch-add-blocks': {
      const { blocks, edges, loops, parallels } = payload
      logger.info(`Batch adding blocks to workflow ${workflowId}`, {
        blockCount: blocks?.length || 0,
        edgeCount: edges?.length || 0,
        loopCount: Object.keys(loops || {}).length,
        parallelCount: Object.keys(parallels || {}).length,
      })
      if (blocks && blocks.length > 0) {
        // Bulk-insert all block rows in a single statement.
        const blockValues = blocks.map((block: Record<string, unknown>) => ({
          id: block.id as string,
          workflowId,
          type: block.type as string,
          name: block.name as string,
          positionX: (block.position as { x: number; y: number }).x,
          positionY: (block.position as { x: number; y: number }).y,
          data: (block.data as Record<string, unknown>) || {},
          subBlocks: (block.subBlocks as Record<string, unknown>) || {},
          outputs: (block.outputs as Record<string, unknown>) || {},
          enabled: (block.enabled as boolean) ?? true,
          horizontalHandles: (block.horizontalHandles as boolean) ?? true,
          advancedMode: (block.advancedMode as boolean) ?? false,
          triggerMode: (block.triggerMode as boolean) ?? false,
          height: (block.height as number) || 0,
        }))
        await tx.insert(workflowBlocks).values(blockValues)
        // Create subflow entries for loop/parallel blocks (skip if already in payload)
        // Default configs below ignore block.data (count/loopType/collection),
        // unlike the single-block 'add' path which derives them from data.
        // NOTE(review): this path also uses parallelType 'fixed' where the
        // single-block path used 'count' — confirm which value consumers expect.
        const loopIds = new Set(loops ? Object.keys(loops) : [])
        const parallelIds = new Set(parallels ? Object.keys(parallels) : [])
        for (const block of blocks) {
          const blockId = block.id as string
          if (block.type === 'loop' && !loopIds.has(blockId)) {
            await tx.insert(workflowSubflows).values({
              id: blockId,
              workflowId,
              type: 'loop',
              config: {
                loopType: 'for',
                iterations: DEFAULT_LOOP_ITERATIONS,
                nodes: [],
              },
            })
          } else if (block.type === 'parallel' && !parallelIds.has(blockId)) {
            await tx.insert(workflowSubflows).values({
              id: blockId,
              workflowId,
              type: 'parallel',
              config: {
                parallelType: 'fixed',
                count: DEFAULT_PARALLEL_COUNT,
                nodes: [],
              },
            })
          }
        }
        // Update parent subflow node lists — collect each distinct parentId
        // referenced by the inserted blocks, then refresh each once.
        const parentIds = new Set<string>()
        for (const block of blocks) {
          const parentId = (block.data as Record<string, unknown>)?.parentId as string | undefined
          if (parentId) {
            parentIds.add(parentId)
          }
        }
        for (const parentId of parentIds) {
          await updateSubflowNodeList(tx, workflowId, parentId)
        }
      }
      if (edges && edges.length > 0) {
        // Bulk-insert edges after blocks so FK-style references already exist.
        const edgeValues = edges.map((edge: Record<string, unknown>) => ({
          id: edge.id as string,
          workflowId,
          sourceBlockId: edge.source as string,
          targetBlockId: edge.target as string,
          sourceHandle: (edge.sourceHandle as string | null) || null,
          targetHandle: (edge.targetHandle as string | null) || null,
        }))
        await tx.insert(workflowEdges).values(edgeValues)
      }
      // Explicit loop/parallel configs from the payload take precedence over
      // the defaults created above (those are only inserted when the id is
      // absent from loops/parallels).
      if (loops && Object.keys(loops).length > 0) {
        const loopValues = Object.entries(loops).map(([id, loop]) => ({
          id,
          workflowId,
          type: 'loop',
          config: loop as Record<string, unknown>,
        }))
        await tx.insert(workflowSubflows).values(loopValues)
      }
      if (parallels && Object.keys(parallels).length > 0) {
        const parallelValues = Object.entries(parallels).map(([id, parallel]) => ({
          id,
          workflowId,
          type: 'parallel',
          config: parallel as Record<string, unknown>,
        }))
        await tx.insert(workflowSubflows).values(parallelValues)
      }
      logger.info(`Successfully batch added blocks to workflow ${workflowId}`)
      break
    }
    case 'batch-remove-blocks': {
      const { ids } = payload
      // Empty/malformed batch is a no-op, mirroring batch-update-positions.
      if (!Array.isArray(ids) || ids.length === 0) {
        return
      }
      logger.info(`Batch removing ${ids.length} blocks from workflow ${workflowId}`)
      // Collect all block IDs including children of subflows
      // (cascade: deleting a loop/parallel also deletes its nested blocks).
      const allBlocksToDelete = new Set<string>(ids)
      for (const id of ids) {
        const blockToRemove = await tx
          .select({ type: workflowBlocks.type })
          .from(workflowBlocks)
          .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
          .limit(1)
        if (blockToRemove.length > 0 && isSubflowBlockType(blockToRemove[0].type)) {
          // Children are found via the JSONB data->>'parentId' pointer.
          const childBlocks = await tx
            .select({ id: workflowBlocks.id })
            .from(workflowBlocks)
            .where(
              and(
                eq(workflowBlocks.workflowId, workflowId),
                sql`${workflowBlocks.data}->>'parentId' = ${id}`
              )
            )
          childBlocks.forEach((child: { id: string }) => allBlocksToDelete.add(child.id))
        }
      }
      const blockIdsArray = Array.from(allBlocksToDelete)
      // Collect parent IDs BEFORE deleting blocks
      // (afterwards the rows — and their parentId data — are gone).
      const parentIds = new Set<string>()
      for (const id of ids) {
        const parentInfo = await tx
          .select({ parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'` })
          .from(workflowBlocks)
          .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
          .limit(1)
        if (parentInfo.length > 0 && parentInfo[0].parentId) {
          parentIds.add(parentInfo[0].parentId)
        }
      }
      // Clean up external webhooks
      // (best-effort: individual cleanup failures are logged, deletion continues).
      const webhooksToCleanup = await tx
        .select({
          webhook: webhook,
          workflow: {
            id: workflow.id,
            userId: workflow.userId,
            workspaceId: workflow.workspaceId,
          },
        })
        .from(webhook)
        .innerJoin(workflow, eq(webhook.workflowId, workflow.id))
        .where(and(eq(webhook.workflowId, workflowId), inArray(webhook.blockId, blockIdsArray)))
      if (webhooksToCleanup.length > 0) {
        const requestId = `socket-batch-${workflowId}-${Date.now()}`
        for (const { webhook: wh, workflow: wf } of webhooksToCleanup) {
          try {
            await cleanupExternalWebhook(wh, wf, requestId)
          } catch (error) {
            logger.error(`Failed to cleanup webhook ${wh.id}:`, error)
          }
        }
      }
      // Delete edges connected to any of the blocks (either endpoint).
      await tx
        .delete(workflowEdges)
        .where(
          and(
            eq(workflowEdges.workflowId, workflowId),
            or(
              inArray(workflowEdges.sourceBlockId, blockIdsArray),
              inArray(workflowEdges.targetBlockId, blockIdsArray)
            )
          )
        )
      // Delete subflow entries
      // (no-ops for ids that never had a subflow row).
      await tx
        .delete(workflowSubflows)
        .where(
          and(
            eq(workflowSubflows.workflowId, workflowId),
            inArray(workflowSubflows.id, blockIdsArray)
          )
        )
      // Delete all blocks
      await tx
        .delete(workflowBlocks)
        .where(
          and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIdsArray))
        )
      // Update parent subflow node lists using pre-collected parent IDs
      // so surviving parents no longer reference the removed children.
      for (const parentId of parentIds) {
        await updateSubflowNodeList(tx, workflowId, parentId)
      }
      logger.info(
        `Successfully batch removed ${blockIdsArray.length} blocks from workflow ${workflowId}`
      )
      break
    }
    default:
      throw new Error(`Unsupported blocks operation: ${operation}`)
  }
}
async function handleEdgeOperationTx(tx: any, workflowId: string, operation: string, payload: any) {
switch (operation) {
case 'add': {
@@ -1013,53 +923,6 @@ async function handleVariableOperationTx(
break
}
case 'duplicate': {
if (!payload.sourceVariableId || !payload.id) {
throw new Error('Missing required fields for duplicate variable operation')
}
const sourceVariable = currentVariables[payload.sourceVariableId]
if (!sourceVariable) {
throw new Error(`Source variable ${payload.sourceVariableId} not found`)
}
// Create duplicated variable with unique name
const baseName = `${sourceVariable.name} (copy)`
let uniqueName = baseName
let nameIndex = 1
// Ensure name uniqueness
const existingNames = Object.values(currentVariables).map((v: any) => v.name)
while (existingNames.includes(uniqueName)) {
uniqueName = `${baseName} (${nameIndex})`
nameIndex++
}
const duplicatedVariable = {
...sourceVariable,
id: payload.id,
name: uniqueName,
}
const updatedVariables = {
...currentVariables,
[payload.id]: duplicatedVariable,
}
await tx
.update(workflow)
.set({
variables: updatedVariables,
updatedAt: new Date(),
})
.where(eq(workflow.id, workflowId))
logger.debug(
`Duplicated variable ${payload.sourceVariableId} -> ${payload.id} (${uniqueName}) in workflow ${workflowId}`
)
break
}
default:
logger.warn(`Unknown variable operation: ${operation}`)
throw new Error(`Unsupported variable operation: ${operation}`)

View File

@@ -145,7 +145,46 @@ export function setupOperationsHandlers(
return
}
if (target === 'variable' && ['add', 'remove', 'duplicate'].includes(operation)) {
if (target === 'blocks' && operation === 'batch-update-positions') {
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID(), isBatchPositionUpdate: true },
})
try {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
} catch (error) {
logger.error('Failed to persist batch position update:', error)
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: error instanceof Error ? error.message : 'Database persistence failed',
retryable: true,
})
}
}
return
}
if (target === 'variable' && ['add', 'remove'].includes(operation)) {
// Persist first, then broadcast
await persistWorkflowOperation(workflowId, {
operation,
@@ -184,7 +223,6 @@ export function setupOperationsHandlers(
}
if (target === 'workflow' && operation === 'replace-state') {
// Persist the workflow state replacement to database first
await persistWorkflowOperation(workflowId, {
operation,
target,
@@ -221,6 +259,64 @@ export function setupOperationsHandlers(
return
}
if (target === 'blocks' && operation === 'batch-add-blocks') {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (target === 'blocks' && operation === 'batch-remove-blocks') {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
// For non-position operations, persist first then broadcast
await persistWorkflowOperation(workflowId, {
operation,

View File

@@ -294,13 +294,21 @@ describe('Socket Server Index Integration', () => {
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
const validOperation = {
operation: 'add',
target: 'block',
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
id: 'test-block',
type: 'action',
name: 'Test Block',
position: { x: 100, y: 200 },
blocks: [
{
id: 'test-block',
type: 'action',
name: 'Test Block',
position: { x: 100, y: 200 },
},
],
edges: [],
loops: {},
parallels: {},
subBlockValues: {},
},
timestamp: Date.now(),
}
@@ -308,30 +316,39 @@ describe('Socket Server Index Integration', () => {
expect(() => WorkflowOperationSchema.parse(validOperation)).not.toThrow()
})
it.concurrent('should validate block operations with autoConnectEdge', async () => {
it.concurrent('should validate batch-add-blocks with edges', async () => {
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
const validOperationWithAutoEdge = {
operation: 'add',
target: 'block',
const validOperationWithEdge = {
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
id: 'test-block',
type: 'action',
name: 'Test Block',
position: { x: 100, y: 200 },
autoConnectEdge: {
id: 'auto-edge-123',
source: 'source-block',
target: 'test-block',
sourceHandle: 'output',
targetHandle: 'target',
type: 'workflowEdge',
},
blocks: [
{
id: 'test-block',
type: 'action',
name: 'Test Block',
position: { x: 100, y: 200 },
},
],
edges: [
{
id: 'auto-edge-123',
source: 'source-block',
target: 'test-block',
sourceHandle: 'output',
targetHandle: 'target',
type: 'workflowEdge',
},
],
loops: {},
parallels: {},
subBlockValues: {},
},
timestamp: Date.now(),
}
expect(() => WorkflowOperationSchema.parse(validOperationWithAutoEdge)).not.toThrow()
expect(() => WorkflowOperationSchema.parse(validOperationWithEdge)).not.toThrow()
})
it.concurrent('should validate edge operations', async () => {

View File

@@ -27,13 +27,13 @@ describe('checkRolePermission', () => {
}
})
it('should allow add operation', () => {
const result = checkRolePermission('admin', 'add')
it('should allow batch-add-blocks operation', () => {
const result = checkRolePermission('admin', 'batch-add-blocks')
expectPermissionAllowed(result)
})
it('should allow remove operation', () => {
const result = checkRolePermission('admin', 'remove')
it('should allow batch-remove-blocks operation', () => {
const result = checkRolePermission('admin', 'batch-remove-blocks')
expectPermissionAllowed(result)
})
@@ -42,8 +42,8 @@ describe('checkRolePermission', () => {
expectPermissionAllowed(result)
})
it('should allow duplicate operation', () => {
const result = checkRolePermission('admin', 'duplicate')
it('should allow batch-update-positions operation', () => {
const result = checkRolePermission('admin', 'batch-update-positions')
expectPermissionAllowed(result)
})
@@ -63,13 +63,13 @@ describe('checkRolePermission', () => {
}
})
it('should allow add operation', () => {
const result = checkRolePermission('write', 'add')
it('should allow batch-add-blocks operation', () => {
const result = checkRolePermission('write', 'batch-add-blocks')
expectPermissionAllowed(result)
})
it('should allow remove operation', () => {
const result = checkRolePermission('write', 'remove')
it('should allow batch-remove-blocks operation', () => {
const result = checkRolePermission('write', 'batch-remove-blocks')
expectPermissionAllowed(result)
})
@@ -85,14 +85,14 @@ describe('checkRolePermission', () => {
expectPermissionAllowed(result)
})
it('should deny add operation for read role', () => {
const result = checkRolePermission('read', 'add')
it('should deny batch-add-blocks operation for read role', () => {
const result = checkRolePermission('read', 'batch-add-blocks')
expectPermissionDenied(result, 'read')
expectPermissionDenied(result, 'add')
expectPermissionDenied(result, 'batch-add-blocks')
})
it('should deny remove operation for read role', () => {
const result = checkRolePermission('read', 'remove')
it('should deny batch-remove-blocks operation for read role', () => {
const result = checkRolePermission('read', 'batch-remove-blocks')
expectPermissionDenied(result, 'read')
})
@@ -101,9 +101,9 @@ describe('checkRolePermission', () => {
expectPermissionDenied(result, 'read')
})
it('should deny duplicate operation for read role', () => {
const result = checkRolePermission('read', 'duplicate')
expectPermissionDenied(result, 'read')
it('should allow batch-update-positions operation for read role', () => {
const result = checkRolePermission('read', 'batch-update-positions')
expectPermissionAllowed(result)
})
it('should deny replace-state operation for read role', () => {
@@ -117,7 +117,8 @@ describe('checkRolePermission', () => {
})
it('should deny all write operations for read role', () => {
const writeOperations = SOCKET_OPERATIONS.filter((op) => op !== 'update-position')
const readAllowedOps = ['update-position', 'batch-update-positions']
const writeOperations = SOCKET_OPERATIONS.filter((op) => !readAllowedOps.includes(op))
for (const operation of writeOperations) {
const result = checkRolePermission('read', operation)
@@ -138,7 +139,7 @@ describe('checkRolePermission', () => {
})
it('should deny operations for empty role', () => {
const result = checkRolePermission('', 'add')
const result = checkRolePermission('', 'batch-add-blocks')
expectPermissionDenied(result)
})
})
@@ -186,15 +187,21 @@ describe('checkRolePermission', () => {
it('should verify read has minimal permissions', () => {
const readOps = ROLE_ALLOWED_OPERATIONS.read
expect(readOps).toHaveLength(1)
expect(readOps).toHaveLength(2)
expect(readOps).toContain('update-position')
expect(readOps).toContain('batch-update-positions')
})
})
describe('specific operations', () => {
const testCases = [
{ operation: 'add', adminAllowed: true, writeAllowed: true, readAllowed: false },
{ operation: 'remove', adminAllowed: true, writeAllowed: true, readAllowed: false },
{ operation: 'batch-add-blocks', adminAllowed: true, writeAllowed: true, readAllowed: false },
{
operation: 'batch-remove-blocks',
adminAllowed: true,
writeAllowed: true,
readAllowed: false,
},
{ operation: 'update', adminAllowed: true, writeAllowed: true, readAllowed: false },
{ operation: 'update-position', adminAllowed: true, writeAllowed: true, readAllowed: true },
{ operation: 'update-name', adminAllowed: true, writeAllowed: true, readAllowed: false },
@@ -214,7 +221,12 @@ describe('checkRolePermission', () => {
readAllowed: false,
},
{ operation: 'toggle-handles', adminAllowed: true, writeAllowed: true, readAllowed: false },
{ operation: 'duplicate', adminAllowed: true, writeAllowed: true, readAllowed: false },
{
operation: 'batch-update-positions',
adminAllowed: true,
writeAllowed: true,
readAllowed: true,
},
{ operation: 'replace-state', adminAllowed: true, writeAllowed: true, readAllowed: false },
]
@@ -238,13 +250,13 @@ describe('checkRolePermission', () => {
describe('reason messages', () => {
it('should include role in denial reason', () => {
const result = checkRolePermission('read', 'add')
const result = checkRolePermission('read', 'batch-add-blocks')
expect(result.reason).toContain("'read'")
})
it('should include operation in denial reason', () => {
const result = checkRolePermission('read', 'add')
expect(result.reason).toContain("'add'")
const result = checkRolePermission('read', 'batch-add-blocks')
expect(result.reason).toContain("'batch-add-blocks'")
})
it('should have descriptive denial message format', () => {

View File

@@ -13,6 +13,9 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'remove',
'update',
'update-position',
'batch-update-positions',
'batch-add-blocks',
'batch-remove-blocks',
'update-name',
'toggle-enabled',
'update-parent',
@@ -20,7 +23,6 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'update-advanced-mode',
'update-trigger-mode',
'toggle-handles',
'duplicate',
'replace-state',
],
write: [
@@ -28,6 +30,9 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'remove',
'update',
'update-position',
'batch-update-positions',
'batch-add-blocks',
'batch-remove-blocks',
'update-name',
'toggle-enabled',
'update-parent',
@@ -35,10 +40,9 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'update-advanced-mode',
'update-trigger-mode',
'toggle-handles',
'duplicate',
'replace-state',
],
read: ['update-position'],
read: ['update-position', 'batch-update-positions'],
}
// Check if a role allows a specific operation (no DB query, pure logic)

View File

@@ -103,18 +103,26 @@ describe('Socket Server Integration Tests', () => {
const operationPromise = new Promise<void>((resolve) => {
client2.once('workflow-operation', (data) => {
expect(data.operation).toBe('add')
expect(data.target).toBe('block')
expect(data.payload.id).toBe('block-123')
expect(data.operation).toBe('batch-add-blocks')
expect(data.target).toBe('blocks')
expect(data.payload.blocks[0].id).toBe('block-123')
resolve()
})
})
clientSocket.emit('workflow-operation', {
workflowId,
operation: 'add',
target: 'block',
payload: { id: 'block-123', type: 'action', name: 'Test Block' },
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
blocks: [
{ id: 'block-123', type: 'action', name: 'Test Block', position: { x: 0, y: 0 } },
],
edges: [],
loops: {},
parallels: {},
subBlockValues: {},
},
timestamp: Date.now(),
})
@@ -170,9 +178,17 @@ describe('Socket Server Integration Tests', () => {
clients[0].emit('workflow-operation', {
workflowId,
operation: 'add',
target: 'block',
payload: { id: 'stress-block', type: 'action' },
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
blocks: [
{ id: 'stress-block', type: 'action', name: 'Stress Block', position: { x: 0, y: 0 } },
],
edges: [],
loops: {},
parallels: {},
subBlockValues: {},
},
timestamp: Date.now(),
})
@@ -211,7 +227,7 @@ describe('Socket Server Integration Tests', () => {
const operationsPromise = new Promise<void>((resolve) => {
client2.on('workflow-operation', (data) => {
receivedCount++
receivedOperations.add(data.payload.id)
receivedOperations.add(data.payload.blocks[0].id)
if (receivedCount === numOperations) {
resolve()
@@ -222,9 +238,22 @@ describe('Socket Server Integration Tests', () => {
for (let i = 0; i < numOperations; i++) {
clientSocket.emit('workflow-operation', {
workflowId,
operation: 'add',
target: 'block',
payload: { id: `rapid-block-${i}`, type: 'action' },
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
blocks: [
{
id: `rapid-block-${i}`,
type: 'action',
name: `Rapid Block ${i}`,
position: { x: 0, y: 0 },
},
],
edges: [],
loops: {},
parallels: {},
subBlockValues: {},
},
timestamp: Date.now(),
})
}

View File

@@ -17,8 +17,6 @@ const AutoConnectEdgeSchema = z.object({
export const BlockOperationSchema = z.object({
operation: z.enum([
'add',
'remove',
'update-position',
'update-name',
'toggle-enabled',
@@ -27,12 +25,10 @@ export const BlockOperationSchema = z.object({
'update-advanced-mode',
'update-trigger-mode',
'toggle-handles',
'duplicate',
]),
target: z.literal('block'),
payload: z.object({
id: z.string(),
sourceId: z.string().optional(), // For duplicate operations
type: z.string().optional(),
name: z.string().optional(),
position: PositionSchema.optional(),
@@ -47,7 +43,21 @@ export const BlockOperationSchema = z.object({
advancedMode: z.boolean().optional(),
triggerMode: z.boolean().optional(),
height: z.number().optional(),
autoConnectEdge: AutoConnectEdgeSchema.optional(), // Add support for auto-connect edges
}),
timestamp: z.number(),
operationId: z.string().optional(),
})
export const BatchPositionUpdateSchema = z.object({
operation: z.literal('batch-update-positions'),
target: z.literal('blocks'),
payload: z.object({
updates: z.array(
z.object({
id: z.string(),
position: PositionSchema,
})
),
}),
timestamp: z.number(),
operationId: z.string().optional(),
@@ -102,23 +112,37 @@ export const VariableOperationSchema = z.union([
timestamp: z.number(),
operationId: z.string().optional(),
}),
z.object({
operation: z.literal('duplicate'),
target: z.literal('variable'),
payload: z.object({
sourceVariableId: z.string(),
id: z.string(),
}),
timestamp: z.number(),
operationId: z.string().optional(),
}),
])
export const WorkflowStateOperationSchema = z.object({
operation: z.literal('replace-state'),
target: z.literal('workflow'),
payload: z.object({
state: z.any(), // Full workflow state
state: z.any(),
}),
timestamp: z.number(),
operationId: z.string().optional(),
})
/**
 * Socket payload for adding a group of blocks in a single operation,
 * together with their connecting edges, loop/parallel configs, and
 * subblock values (used by cross-workflow paste/move).
 */
export const BatchAddBlocksSchema = z.object({
  operation: z.literal('batch-add-blocks'),
  // Batch operations target the plural 'blocks' rather than a single 'block'.
  target: z.literal('blocks'),
  payload: z.object({
    // Block snapshots to insert; loosely typed here — NOTE(review): shape is
    // presumably validated further downstream, confirm before tightening.
    blocks: z.array(z.record(z.any())),
    // Edges between the added blocks (same shape as auto-connect edges).
    edges: z.array(AutoConnectEdgeSchema).optional(),
    // Loop configs keyed by loop block id.
    loops: z.record(z.any()).optional(),
    // Parallel configs keyed by parallel block id.
    parallels: z.record(z.any()).optional(),
    // Per-block subblock values: blockId -> subBlockId -> value.
    subBlockValues: z.record(z.record(z.any())).optional(),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
export const BatchRemoveBlocksSchema = z.object({
operation: z.literal('batch-remove-blocks'),
target: z.literal('blocks'),
payload: z.object({
ids: z.array(z.string()),
}),
timestamp: z.number(),
operationId: z.string().optional(),
@@ -126,6 +150,9 @@ export const WorkflowStateOperationSchema = z.object({
export const WorkflowOperationSchema = z.union([
BlockOperationSchema,
BatchPositionUpdateSchema,
BatchAddBlocksSchema,
BatchRemoveBlocksSchema,
EdgeOperationSchema,
SubflowOperationSchema,
VariableOperationSchema,

View File

@@ -375,15 +375,31 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
cancelOperationsForBlock: (blockId: string) => {
logger.debug('Canceling all operations for block', { blockId })
// No debounced timeouts to cancel (moved to server-side)
// Find and cancel operation timeouts for operations related to this block
const state = get()
const operationsToCancel = state.operations.filter(
(op) =>
(op.operation.target === 'block' && op.operation.payload?.id === blockId) ||
(op.operation.target === 'subblock' && op.operation.payload?.blockId === blockId)
)
const operationsToCancel = state.operations.filter((op) => {
const { target, payload, operation } = op.operation
// Single block property updates (update-position, toggle-enabled, update-name, etc.)
if (target === 'block' && payload?.id === blockId) return true
// Subblock updates for this block
if (target === 'subblock' && payload?.blockId === blockId) return true
// Batch block operations
if (target === 'blocks') {
if (operation === 'batch-add-blocks' && Array.isArray(payload?.blocks)) {
return payload.blocks.some((b: { id: string }) => b.id === blockId)
}
if (operation === 'batch-remove-blocks' && Array.isArray(payload?.ids)) {
return payload.ids.includes(blockId)
}
if (operation === 'batch-update-positions' && Array.isArray(payload?.updates)) {
return payload.updates.some((u: { id: string }) => u.id === blockId)
}
}
return false
})
// Cancel timeouts for these operations
operationsToCancel.forEach((op) => {
@@ -401,13 +417,30 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
})
// Remove all operations for this block (both pending and processing)
const newOperations = state.operations.filter(
(op) =>
!(
(op.operation.target === 'block' && op.operation.payload?.id === blockId) ||
(op.operation.target === 'subblock' && op.operation.payload?.blockId === blockId)
)
)
const newOperations = state.operations.filter((op) => {
const { target, payload, operation } = op.operation
// Single block property updates (update-position, toggle-enabled, update-name, etc.)
if (target === 'block' && payload?.id === blockId) return false
// Subblock updates for this block
if (target === 'subblock' && payload?.blockId === blockId) return false
// Batch block operations
if (target === 'blocks') {
if (operation === 'batch-add-blocks' && Array.isArray(payload?.blocks)) {
if (payload.blocks.some((b: { id: string }) => b.id === blockId)) return false
}
if (operation === 'batch-remove-blocks' && Array.isArray(payload?.ids)) {
if (payload.ids.includes(blockId)) return false
}
if (operation === 'batch-update-positions' && Array.isArray(payload?.updates)) {
if (payload.updates.some((u: { id: string }) => u.id === blockId)) return false
}
}
return true
})
set({
operations: newOperations,

View File

@@ -283,39 +283,6 @@ export const useVariablesStore = create<VariablesStore>()(
})
},
/**
 * Duplicates a variable, giving the copy a unique "(copy)" name within
 * the same workflow.
 *
 * @param id - Id of the variable to duplicate.
 * @param providedId - Optional predetermined id for the copy (for
 *   collaborative operations); a random UUID is generated otherwise.
 * @returns The new variable's id, or '' when the source does not exist.
 */
duplicateVariable: (id, providedId?: string) => {
  const state = get()
  if (!state.variables[id]) return ''
  const variable = state.variables[id]
  const newId = providedId || crypto.randomUUID()
  // Ensure the copied name is unique among this workflow's variables:
  // "Name (copy)", then "Name (copy) (1)", "Name (copy) (2)", ...
  const workflowVariables = get().getVariablesByWorkflowId(variable.workflowId)
  const baseName = `${variable.name} (copy)`
  let uniqueName = baseName
  let nameIndex = 1
  while (workflowVariables.some((v) => v.name === uniqueName)) {
    uniqueName = `${baseName} (${nameIndex})`
    nameIndex++
  }
  set((state) => ({
    variables: {
      ...state.variables,
      [newId]: {
        id: newId,
        workflowId: variable.workflowId,
        name: uniqueName,
        type: variable.type,
        value: variable.value,
      },
    },
  }))
  return newId
},
getVariablesByWorkflowId: (workflowId) => {
return Object.values(get().variables).filter((variable) => variable.workflowId === workflowId)
},

View File

@@ -43,12 +43,6 @@ export interface VariablesStore {
deleteVariable: (id: string) => void
/**
* Duplicates a variable with a "(copy)" suffix, ensuring name uniqueness
* Optionally accepts a predetermined ID for collaborative operations
*/
duplicateVariable: (id: string, providedId?: string) => string
/**
* Returns all variables for a specific workflow
*/

View File

@@ -14,7 +14,6 @@ import {
createAddBlockEntry,
createAddEdgeEntry,
createBlock,
createDuplicateBlockEntry,
createMockStorage,
createMoveBlockEntry,
createRemoveBlockEntry,
@@ -23,7 +22,7 @@ import {
} from '@sim/testing'
import { beforeEach, describe, expect, it } from 'vitest'
import { runWithUndoRedoRecordingSuspended, useUndoRedoStore } from '@/stores/undo-redo/store'
import type { DuplicateBlockOperation, UpdateParentOperation } from '@/stores/undo-redo/types'
import type { UpdateParentOperation } from '@/stores/undo-redo/types'
describe('useUndoRedoStore', () => {
const workflowId = 'wf-test'
@@ -617,63 +616,6 @@ describe('useUndoRedoStore', () => {
})
})
// Undo/redo bookkeeping for duplicate-block entries: undoing a duplicate
// surfaces a remove-block inverse, and redo restores the entry.
describe('duplicate-block operations', () => {
  it('should handle duplicate-block operations', () => {
    const { push, undo, redo, getStackSizes } = useUndoRedoStore.getState()
    const sourceBlock = createBlock({ id: 'source-block' })
    const duplicatedBlock = createBlock({ id: 'duplicated-block' })
    push(
      workflowId,
      userId,
      createDuplicateBlockEntry('source-block', 'duplicated-block', duplicatedBlock, {
        workflowId,
        userId,
      })
    )
    expect(getStackSizes(workflowId, userId).undoSize).toBe(1)
    // Undo pops the entry; its inverse should remove the duplicated block.
    const entry = undo(workflowId, userId)
    expect(entry?.operation.type).toBe('duplicate-block')
    expect(entry?.inverse.type).toBe('remove-block')
    expect(getStackSizes(workflowId, userId).redoSize).toBe(1)
    // Redo moves the entry back onto the undo stack.
    redo(workflowId, userId)
    expect(getStackSizes(workflowId, userId).undoSize).toBe(1)
  })

  it('should store the duplicated block snapshot correctly', () => {
    const { push, undo } = useUndoRedoStore.getState()
    const duplicatedBlock = createBlock({
      id: 'duplicated-block',
      name: 'Duplicated Agent',
      type: 'agent',
      position: { x: 200, y: 200 },
    })
    push(
      workflowId,
      userId,
      createDuplicateBlockEntry('source-block', 'duplicated-block', duplicatedBlock, {
        workflowId,
        userId,
      })
    )
    // The popped entry should carry the full snapshot of the duplicate.
    const entry = undo(workflowId, userId)
    const operation = entry?.operation as DuplicateBlockOperation
    expect(operation.data.duplicatedBlockSnapshot).toMatchObject({
      id: 'duplicated-block',
      name: 'Duplicated Agent',
      type: 'agent',
      position: { x: 200, y: 200 },
    })
  })
})
describe('update-parent operations', () => {
it('should handle update-parent operations', () => {
const { push, undo, redo, getStackSizes } = useUndoRedoStore.getState()

View File

@@ -3,10 +3,11 @@ import type { Edge } from 'reactflow'
import { create } from 'zustand'
import { createJSONStorage, persist } from 'zustand/middleware'
import type {
BatchAddBlocksOperation,
BatchRemoveBlocksOperation,
MoveBlockOperation,
Operation,
OperationEntry,
RemoveBlockOperation,
RemoveEdgeOperation,
UndoRedoState,
} from '@/stores/undo-redo/types'
@@ -83,13 +84,13 @@ function isOperationApplicable(
graph: { blocksById: Record<string, BlockState>; edgesById: Record<string, Edge> }
): boolean {
switch (operation.type) {
case 'remove-block': {
const op = operation as RemoveBlockOperation
return Boolean(graph.blocksById[op.data.blockId])
case 'batch-remove-blocks': {
const op = operation as BatchRemoveBlocksOperation
return op.data.blockSnapshots.every((block) => Boolean(graph.blocksById[block.id]))
}
case 'add-block': {
const blockId = operation.data.blockId
return !graph.blocksById[blockId]
case 'batch-add-blocks': {
const op = operation as BatchAddBlocksOperation
return op.data.blockSnapshots.every((block) => !graph.blocksById[block.id])
}
case 'move-block': {
const op = operation as MoveBlockOperation
@@ -99,10 +100,6 @@ function isOperationApplicable(
const blockId = operation.data.blockId
return Boolean(graph.blocksById[blockId])
}
case 'duplicate-block': {
const duplicatedId = operation.data.duplicatedBlockId
return Boolean(graph.blocksById[duplicatedId])
}
case 'remove-edge': {
const op = operation as RemoveEdgeOperation
return Boolean(graph.edgesById[op.data.edgeId])

View File

@@ -2,15 +2,14 @@ import type { Edge } from 'reactflow'
import type { BlockState } from '@/stores/workflows/workflow/types'
export type OperationType =
| 'add-block'
| 'remove-block'
| 'batch-add-blocks'
| 'batch-remove-blocks'
| 'add-edge'
| 'remove-edge'
| 'add-subflow'
| 'remove-subflow'
| 'move-block'
| 'move-subflow'
| 'duplicate-block'
| 'update-parent'
| 'apply-diff'
| 'accept-diff'
@@ -24,20 +23,21 @@ export interface BaseOperation {
userId: string
}
export interface AddBlockOperation extends BaseOperation {
type: 'add-block'
export interface BatchAddBlocksOperation extends BaseOperation {
type: 'batch-add-blocks'
data: {
blockId: string
blockSnapshots: BlockState[]
edgeSnapshots: Edge[]
subBlockValues: Record<string, Record<string, unknown>>
}
}
export interface RemoveBlockOperation extends BaseOperation {
type: 'remove-block'
export interface BatchRemoveBlocksOperation extends BaseOperation {
type: 'batch-remove-blocks'
data: {
blockId: string
blockSnapshot: BlockState | null
edgeSnapshots?: Edge[]
allBlockSnapshots?: Record<string, BlockState>
blockSnapshots: BlockState[]
edgeSnapshots: Edge[]
subBlockValues: Record<string, Record<string, unknown>>
}
}
@@ -103,16 +103,6 @@ export interface MoveSubflowOperation extends BaseOperation {
}
}
export interface DuplicateBlockOperation extends BaseOperation {
type: 'duplicate-block'
data: {
sourceBlockId: string
duplicatedBlockId: string
duplicatedBlockSnapshot: BlockState
autoConnectEdge?: Edge
}
}
export interface UpdateParentOperation extends BaseOperation {
type: 'update-parent'
data: {
@@ -155,15 +145,14 @@ export interface RejectDiffOperation extends BaseOperation {
}
export type Operation =
| AddBlockOperation
| RemoveBlockOperation
| BatchAddBlocksOperation
| BatchRemoveBlocksOperation
| AddEdgeOperation
| RemoveEdgeOperation
| AddSubflowOperation
| RemoveSubflowOperation
| MoveBlockOperation
| MoveSubflowOperation
| DuplicateBlockOperation
| UpdateParentOperation
| ApplyDiffOperation
| AcceptDiffOperation

View File

@@ -1,4 +1,9 @@
import type { Operation, OperationEntry } from '@/stores/undo-redo/types'
import type {
BatchAddBlocksOperation,
BatchRemoveBlocksOperation,
Operation,
OperationEntry,
} from '@/stores/undo-redo/types'
export function createOperationEntry(operation: Operation, inverse: Operation): OperationEntry {
return {
@@ -11,25 +16,31 @@ export function createOperationEntry(operation: Operation, inverse: Operation):
export function createInverseOperation(operation: Operation): Operation {
switch (operation.type) {
case 'add-block':
case 'batch-add-blocks': {
const op = operation as BatchAddBlocksOperation
return {
...operation,
type: 'remove-block',
type: 'batch-remove-blocks',
data: {
blockId: operation.data.blockId,
blockSnapshot: null,
edgeSnapshots: [],
blockSnapshots: op.data.blockSnapshots,
edgeSnapshots: op.data.edgeSnapshots,
subBlockValues: op.data.subBlockValues,
},
}
} as BatchRemoveBlocksOperation
}
case 'remove-block':
case 'batch-remove-blocks': {
const op = operation as BatchRemoveBlocksOperation
return {
...operation,
type: 'add-block',
type: 'batch-add-blocks',
data: {
blockId: operation.data.blockId,
blockSnapshots: op.data.blockSnapshots,
edgeSnapshots: op.data.edgeSnapshots,
subBlockValues: op.data.subBlockValues,
},
}
} as BatchAddBlocksOperation
}
case 'add-edge':
return {
@@ -89,17 +100,6 @@ export function createInverseOperation(operation: Operation): Operation {
},
}
case 'duplicate-block':
return {
...operation,
type: 'remove-block',
data: {
blockId: operation.data.duplicatedBlockId,
blockSnapshot: operation.data.duplicatedBlockSnapshot,
edgeSnapshots: [],
},
}
case 'update-parent':
return {
...operation,
@@ -147,7 +147,7 @@ export function createInverseOperation(operation: Operation): Operation {
default: {
const exhaustiveCheck: never = operation
throw new Error(`Unhandled operation type: ${(exhaustiveCheck as any).type}`)
throw new Error(`Unhandled operation type: ${(exhaustiveCheck as Operation).type}`)
}
}
}
@@ -155,22 +155,32 @@ export function createInverseOperation(operation: Operation): Operation {
export function operationToCollaborativePayload(operation: Operation): {
operation: string
target: string
payload: any
payload: Record<string, unknown>
} {
switch (operation.type) {
case 'add-block':
case 'batch-add-blocks': {
const op = operation as BatchAddBlocksOperation
return {
operation: 'add',
target: 'block',
payload: { id: operation.data.blockId },
operation: 'batch-add-blocks',
target: 'blocks',
payload: {
blocks: op.data.blockSnapshots,
edges: op.data.edgeSnapshots,
loops: {},
parallels: {},
subBlockValues: op.data.subBlockValues,
},
}
}
case 'remove-block':
case 'batch-remove-blocks': {
const op = operation as BatchRemoveBlocksOperation
return {
operation: 'remove',
target: 'block',
payload: { id: operation.data.blockId },
operation: 'batch-remove-blocks',
target: 'blocks',
payload: { ids: op.data.blockSnapshots.map((b) => b.id) },
}
}
case 'add-edge':
return {
@@ -223,16 +233,6 @@ export function operationToCollaborativePayload(operation: Operation): {
},
}
case 'duplicate-block':
return {
operation: 'duplicate',
target: 'block',
payload: {
sourceId: operation.data.sourceBlockId,
duplicatedId: operation.data.duplicatedBlockId,
},
}
case 'update-parent':
return {
operation: 'update-parent',
@@ -274,7 +274,7 @@ export function operationToCollaborativePayload(operation: Operation): {
default: {
const exhaustiveCheck: never = operation
throw new Error(`Unhandled operation type: ${(exhaustiveCheck as any).type}`)
throw new Error(`Unhandled operation type: ${(exhaustiveCheck as Operation).type}`)
}
}
}

View File

@@ -381,37 +381,6 @@ export const useVariablesStore = create<VariablesStore>()(
})
},
/**
 * Duplicates a variable, giving the copy a unique "(copy)" name within
 * the same workflow.
 *
 * @param id - Id of the variable to duplicate.
 * @param providedId - Optional predetermined id for the copy (for
 *   collaborative operations); a random UUID is generated otherwise.
 * @returns The new variable's id, or '' when the source does not exist.
 */
duplicateVariable: (id, providedId) => {
  const state = get()
  const existing = state.variables[id]
  if (!existing) return ''
  const newId = providedId || uuidv4()
  // Ensure the copied name is unique among this workflow's variables:
  // "Name (copy)", then "Name (copy) (1)", "Name (copy) (2)", ...
  const workflowVariables = state.getVariablesByWorkflowId(existing.workflowId)
  const baseName = `${existing.name} (copy)`
  let uniqueName = baseName
  let nameIndex = 1
  while (workflowVariables.some((v) => v.name === uniqueName)) {
    uniqueName = `${baseName} (${nameIndex})`
    nameIndex++
  }
  set((state) => ({
    variables: {
      ...state.variables,
      [newId]: {
        id: newId,
        workflowId: existing.workflowId,
        name: uniqueName,
        type: existing.type,
        value: existing.value,
      },
    },
  }))
  return newId
},
getVariablesByWorkflowId: (workflowId) => {
return Object.values(get().variables).filter((v) => v.workflowId === workflowId)
},

View File

@@ -57,6 +57,5 @@ export interface VariablesStore {
addVariable: (variable: Omit<Variable, 'id'>, providedId?: string) => string
updateVariable: (id: string, update: Partial<Omit<Variable, 'id' | 'workflowId'>>) => void
deleteVariable: (id: string) => void
duplicateVariable: (id: string, providedId?: string) => string
getVariablesByWorkflowId: (workflowId: string) => Variable[]
}

View File

@@ -1,119 +1,9 @@
import { createLogger } from '@sim/logger'
import { v4 as uuidv4 } from 'uuid'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
import { regenerateWorkflowIds } from '@/stores/workflows/utils'
import type { WorkflowState } from '../workflow/types'
const logger = createLogger('WorkflowJsonImporter')
/**
 * Generate new IDs for all blocks and edges to avoid conflicts.
 *
 * Rewrites every block, edge, loop, and parallel id in the given state and
 * fixes up all cross-references: edge endpoints, subflow node lists,
 * `parentId` links, and `<blockId.` references embedded in subblock string
 * values. Runtime trigger subblock values are cleared so an imported
 * workflow starts clean.
 *
 * @param workflowState - Parsed workflow state whose ids may collide with
 *   existing workflows.
 * @returns A new WorkflowState with fresh UUIDs; metadata and variables are
 *   carried over unchanged.
 */
function regenerateIds(workflowState: WorkflowState): WorkflowState {
  const { metadata, variables } = workflowState
  const blockIdMap = new Map<string, string>()
  const newBlocks: WorkflowState['blocks'] = {}

  // First pass: create new IDs for all blocks
  Object.entries(workflowState.blocks).forEach(([oldId, block]) => {
    const newId = uuidv4()
    blockIdMap.set(oldId, newId)
    newBlocks[newId] = {
      ...block,
      id: newId,
    }
  })

  // Second pass: update edges with new block IDs
  const newEdges = workflowState.edges.map((edge) => ({
    ...edge,
    id: uuidv4(), // Generate new edge ID
    // Endpoints outside the map are left as-is rather than dropped.
    source: blockIdMap.get(edge.source) || edge.source,
    target: blockIdMap.get(edge.target) || edge.target,
  }))

  // Third pass: update loops with new block IDs
  // CRITICAL: Loop IDs must match their block IDs (loops are keyed by their block ID)
  const newLoops: WorkflowState['loops'] = {}
  if (workflowState.loops) {
    Object.entries(workflowState.loops).forEach(([oldLoopId, loop]) => {
      // Map the loop ID using the block ID mapping (loop ID = block ID)
      const newLoopId = blockIdMap.get(oldLoopId) || oldLoopId
      newLoops[newLoopId] = {
        ...loop,
        id: newLoopId,
        nodes: loop.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
      }
    })
  }

  // Fourth pass: update parallels with new block IDs
  // CRITICAL: Parallel IDs must match their block IDs (parallels are keyed by their block ID)
  const newParallels: WorkflowState['parallels'] = {}
  if (workflowState.parallels) {
    Object.entries(workflowState.parallels).forEach(([oldParallelId, parallel]) => {
      // Map the parallel ID using the block ID mapping (parallel ID = block ID)
      const newParallelId = blockIdMap.get(oldParallelId) || oldParallelId
      newParallels[newParallelId] = {
        ...parallel,
        id: newParallelId,
        nodes: parallel.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
      }
    })
  }

  // Fifth pass: update any block references in subblock values and clear runtime trigger values
  Object.entries(newBlocks).forEach(([blockId, block]) => {
    if (block.subBlocks) {
      Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
        // Runtime trigger values (e.g. webhook state) must not survive import.
        if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(subBlockId)) {
          block.subBlocks[subBlockId] = {
            ...subBlock,
            value: null,
          }
          return
        }
        if (subBlock.value && typeof subBlock.value === 'string') {
          // Replace any block references in the value
          let updatedValue = subBlock.value
          blockIdMap.forEach((newId, oldId) => {
            // Replace references like <blockId.output> with new IDs
            const regex = new RegExp(`<${oldId}\\.`, 'g')
            updatedValue = updatedValue.replace(regex, `<${newId}.`)
          })
          block.subBlocks[subBlockId] = {
            ...subBlock,
            value: updatedValue,
          }
        }
      })
    }

    // Update parentId references in block.data
    if (block.data?.parentId) {
      const newParentId = blockIdMap.get(block.data.parentId)
      if (newParentId) {
        block.data.parentId = newParentId
      } else {
        // Parent ID not in mapping - this shouldn't happen but log it
        logger.warn(`Block ${blockId} references unmapped parent ${block.data.parentId}`)
        // Remove invalid parent reference
        block.data.parentId = undefined
        block.data.extent = undefined
      }
    }
  })

  return {
    blocks: newBlocks,
    edges: newEdges,
    loops: newLoops,
    parallels: newParallels,
    metadata,
    variables,
  }
}
/**
* Normalize subblock values by converting empty strings to null.
* This provides backwards compatibility for workflows exported before the null sanitization fix,
@@ -260,9 +150,10 @@ export function parseWorkflowJson(
variables: Array.isArray(workflowData.variables) ? workflowData.variables : undefined,
}
// Regenerate IDs if requested (default: true)
if (regenerateIdsFlag) {
const regeneratedState = regenerateIds(workflowState)
const { idMap: _, ...regeneratedState } = regenerateWorkflowIds(workflowState, {
clearTriggerRuntimeValues: true,
})
workflowState = {
...regeneratedState,
metadata: workflowState.metadata,

View File

@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { withOptimisticUpdate } from '@/lib/core/utils/optimistic-update'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { useVariablesStore } from '@/stores/panel/variables/store'
import type {
@@ -12,7 +13,9 @@ import type {
} from '@/stores/workflows/registry/types'
import { getNextWorkflowColor } from '@/stores/workflows/registry/utils'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getUniqueBlockName, regenerateBlockIds } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowRegistry')
const initialHydration: HydrationState = {
@@ -70,12 +73,12 @@ function setWorkspaceTransitioning(isTransitioning: boolean): void {
export const useWorkflowRegistry = create<WorkflowRegistry>()(
devtools(
(set, get) => ({
// Store state
workflows: {},
activeWorkflowId: null,
error: null,
deploymentStatuses: {},
hydration: initialHydration,
clipboard: null,
beginMetadataLoad: (workspaceId: string) => {
set((state) => ({
@@ -772,10 +775,104 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
deploymentStatuses: {},
error: null,
hydration: initialHydration,
clipboard: null,
})
logger.info('Logout complete - all workflow data cleared')
},
/**
 * Snapshots the selected blocks — plus nested subflow children, the edges
 * that connect them, loop/parallel configs, and subblock values — into the
 * registry clipboard for later paste.
 *
 * @param blockIds - Ids of the top-level blocks the user selected;
 *   no-op when empty.
 */
copyBlocks: (blockIds: string[]) => {
  if (blockIds.length === 0) return
  const workflowStore = useWorkflowStore.getState()
  const activeWorkflowId = get().activeWorkflowId
  const subBlockStore = useSubBlockStore.getState()
  const copiedBlocks: Record<string, BlockState> = {}
  const copiedSubBlockValues: Record<string, Record<string, unknown>> = {}
  const blockIdSet = new Set(blockIds)

  // Auto-include nested nodes from selected subflows
  blockIds.forEach((blockId) => {
    const loop = workflowStore.loops[blockId]
    if (loop?.nodes) loop.nodes.forEach((n) => blockIdSet.add(n))
    const parallel = workflowStore.parallels[blockId]
    if (parallel?.nodes) parallel.nodes.forEach((n) => blockIdSet.add(n))
  })

  blockIdSet.forEach((blockId) => {
    const block = workflowStore.blocks[blockId]
    if (block) {
      // Deep-clone so later canvas edits cannot mutate the clipboard snapshot.
      copiedBlocks[blockId] = JSON.parse(JSON.stringify(block))
      if (activeWorkflowId) {
        const blockValues = subBlockStore.workflowValues[activeWorkflowId]?.[blockId]
        if (blockValues) {
          copiedSubBlockValues[blockId] = JSON.parse(JSON.stringify(blockValues))
        }
      }
    }
  })

  // Only keep edges whose endpoints are both inside the copied set.
  const copiedEdges = workflowStore.edges.filter(
    (edge) => blockIdSet.has(edge.source) && blockIdSet.has(edge.target)
  )

  const copiedLoops: Record<string, Loop> = {}
  Object.entries(workflowStore.loops).forEach(([loopId, loop]) => {
    if (blockIdSet.has(loopId)) {
      copiedLoops[loopId] = JSON.parse(JSON.stringify(loop))
    }
  })

  const copiedParallels: Record<string, Parallel> = {}
  Object.entries(workflowStore.parallels).forEach(([parallelId, parallel]) => {
    if (blockIdSet.has(parallelId)) {
      copiedParallels[parallelId] = JSON.parse(JSON.stringify(parallel))
    }
  })

  set({
    clipboard: {
      blocks: copiedBlocks,
      edges: copiedEdges,
      subBlockValues: copiedSubBlockValues,
      loops: copiedLoops,
      parallels: copiedParallels,
      timestamp: Date.now(),
    },
  })
  logger.info('Copied blocks to clipboard', { count: Object.keys(copiedBlocks).length })
},
/**
 * Builds paste-ready state from the clipboard: block/edge/subflow ids are
 * regenerated against the active workflow's existing blocks, with names
 * made unique via getUniqueBlockName and positions shifted by
 * `positionOffset` (handled inside regenerateBlockIds).
 *
 * @param positionOffset - x/y shift for pasted blocks; defaults to
 *   DEFAULT_DUPLICATE_OFFSET.
 * @returns Regenerated blocks/edges/loops/parallels/subBlockValues, or
 *   null when the clipboard is empty or no workflow is active.
 */
preparePasteData: (positionOffset = DEFAULT_DUPLICATE_OFFSET) => {
  const { clipboard, activeWorkflowId } = get()
  if (!clipboard || Object.keys(clipboard.blocks).length === 0) return null
  if (!activeWorkflowId) return null
  const workflowStore = useWorkflowStore.getState()
  const { blocks, edges, loops, parallels, subBlockValues } = regenerateBlockIds(
    clipboard.blocks,
    clipboard.edges,
    clipboard.loops,
    clipboard.parallels,
    clipboard.subBlockValues,
    positionOffset,
    workflowStore.blocks,
    getUniqueBlockName
  )
  return { blocks, edges, loops, parallels, subBlockValues }
},
// True when the clipboard currently holds at least one copied block.
hasClipboard: () => {
  const clip = get().clipboard
  if (clip === null) return false
  return Object.keys(clip.blocks).length > 0
},
// Drops any copied blocks from the registry clipboard.
clearClipboard: () => {
  set({ clipboard: null })
},
}),
{ name: 'workflow-registry' }
)

View File

@@ -1,3 +1,6 @@
import type { Edge } from 'reactflow'
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
export interface DeploymentStatus {
isDeployed: boolean
deployedAt?: Date
@@ -5,6 +8,15 @@ export interface DeploymentStatus {
needsRedeployment?: boolean
}
/** Snapshot of copied blocks held by the registry for paste operations. */
export interface ClipboardData {
  // Copied block states keyed by their original block id.
  blocks: Record<string, BlockState>
  // Edges whose source and target are both within the copied block set.
  edges: Edge[]
  // Per-block subblock values: blockId -> subBlockId -> value.
  subBlockValues: Record<string, Record<string, unknown>>
  // Loop configs for copied loop blocks, keyed by block id.
  loops: Record<string, Loop>
  // Parallel configs for copied parallel blocks, keyed by block id.
  parallels: Record<string, Parallel>
  // When the copy was taken (ms since epoch, from Date.now()).
  timestamp: number
}
export interface WorkflowMetadata {
id: string
name: string
@@ -38,6 +50,7 @@ export interface WorkflowRegistryState {
error: string | null
deploymentStatuses: Record<string, DeploymentStatus>
hydration: HydrationState
clipboard: ClipboardData | null
}
export interface WorkflowRegistryActions {
@@ -58,6 +71,17 @@ export interface WorkflowRegistryActions {
apiKey?: string
) => void
setWorkflowNeedsRedeployment: (workflowId: string | null, needsRedeployment: boolean) => void
copyBlocks: (blockIds: string[]) => void
preparePasteData: (positionOffset?: { x: number; y: number }) => {
blocks: Record<string, BlockState>
edges: Edge[]
loops: Record<string, Loop>
parallels: Record<string, Parallel>
subBlockValues: Record<string, Record<string, unknown>>
} | null
hasClipboard: () => boolean
clearClipboard: () => void
logout: () => void
}
export type WorkflowRegistry = WorkflowRegistryState & WorkflowRegistryActions

View File

@@ -1,9 +1,31 @@
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import { normalizeName } from '@/executor/constants'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import type { BlockState, SubBlockState } from '@/stores/workflows/workflow/types'
import type {
BlockState,
Loop,
Parallel,
Position,
SubBlockState,
WorkflowState,
} from '@/stores/workflows/workflow/types'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
const WEBHOOK_SUBBLOCK_FIELDS = ['webhookId', 'triggerPath']
export { normalizeName }
/**
 * Result of regenerating ids for a set of copied blocks: the rewritten
 * blocks, edges, and containers, plus a map from old to new block ids.
 */
export interface RegeneratedState {
  blocks: Record<string, BlockState>
  edges: Edge[]
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
  // Maps each original block id to its freshly generated id
  idMap: Map<string, string>
}
/**
* Generates a unique block name by finding the highest number suffix among existing blocks
* with the same base name and incrementing it
@@ -44,6 +66,166 @@ export function getUniqueBlockName(baseName: string, existingBlocks: Record<stri
return `${namePrefix} ${maxNumber + 1}`
}
/** Inputs for constructing a fresh BlockState from a block type. */
export interface PrepareBlockStateOptions {
  id: string
  type: string
  name: string
  position: Position
  // Extra entries merged into the new block's `data` field
  data?: Record<string, unknown>
  // When set, the block is nested inside this parent container
  parentId?: string
  // presumably ReactFlow's node extent; 'parent' keeps the node inside its parent — TODO confirm
  extent?: 'parent'
  triggerMode?: boolean
}
/**
 * Builds a BlockState for a new block of the given type. SubBlocks and
 * outputs are derived from the block registry; unknown types produce an
 * empty-subBlock state so callers always receive a usable object.
 */
export function prepareBlockState(options: PrepareBlockStateOptions): BlockState {
  const { id, type, name, position, data, parentId, extent, triggerMode = false } = options

  const blockData: Record<string, unknown> = { ...(data ?? {}) }
  if (parentId) blockData.parentId = parentId
  if (extent) blockData.extent = extent

  // Shared fields for both the registered and unregistered cases.
  const base = {
    id,
    type,
    name,
    position,
    data: blockData,
    enabled: true,
    horizontalHandles: true,
    advancedMode: false,
    triggerMode,
    height: 0,
  }

  const blockConfig = getBlock(type)
  if (!blockConfig) {
    // Type not in the registry: return a minimal but valid state.
    return { ...base, subBlocks: {}, outputs: {} }
  }

  const subBlocks: Record<string, SubBlockState> = {}
  for (const subBlock of blockConfig.subBlocks ?? []) {
    let initialValue: unknown = null
    if (typeof subBlock.value === 'function') {
      // Dynamic default: evaluate with an empty context; fall back to null on error.
      try {
        initialValue = subBlock.value({})
      } catch {
        initialValue = null
      }
    } else if (subBlock.defaultValue !== undefined) {
      initialValue = subBlock.defaultValue
    } else if (subBlock.type === 'input-format') {
      // input-format starts with a single empty string row.
      initialValue = [
        {
          id: crypto.randomUUID(),
          name: '',
          type: 'string',
          value: '',
          collapsed: false,
        },
      ]
    } else if (subBlock.type === 'table') {
      initialValue = []
    }
    subBlocks[subBlock.id] = {
      id: subBlock.id,
      type: subBlock.type,
      value: initialValue as SubBlockState['value'],
    }
  }

  return {
    ...base,
    subBlocks,
    outputs: getBlockOutputs(type, subBlocks, triggerMode),
  }
}
/** Inputs for duplicating an existing block under a new id and name. */
export interface PrepareDuplicateBlockStateOptions {
  // Block being duplicated
  sourceBlock: BlockState
  newId: string
  newName: string
  // Offset applied to the source block's position for the duplicate
  positionOffset: { x: number; y: number }
  // Current subblock values of the source block, keyed by subblock id
  subBlockValues: Record<string, unknown>
}
/**
 * Builds the BlockState for a duplicate of an existing block: deep-copies the
 * structure, overlays current subblock values, and strips webhook-identity
 * fields so the copy does not share the source's webhook registration.
 * Returns the new block plus the filtered subblock values to seed the
 * subblock store with.
 */
export function prepareDuplicateBlockState(options: PrepareDuplicateBlockStateOptions): {
  block: BlockState
  subBlockValues: Record<string, unknown>
} {
  const { sourceBlock, newId, newName, positionOffset, subBlockValues } = options

  const deepCopy = <T>(value: T): T => JSON.parse(JSON.stringify(value))

  // Exclude webhook fields from the values carried over to the duplicate.
  const filteredSubBlockValues: Record<string, unknown> = {}
  for (const [key, value] of Object.entries(subBlockValues)) {
    if (!WEBHOOK_SUBBLOCK_FIELDS.includes(key)) {
      filteredSubBlockValues[key] = value
    }
  }

  // Start from a deep copy of the source structure, minus webhook fields.
  const mergedSubBlocks: Record<string, SubBlockState> = sourceBlock.subBlocks
    ? deepCopy(sourceBlock.subBlocks)
    : {}
  for (const field of WEBHOOK_SUBBLOCK_FIELDS) {
    delete mergedSubBlocks[field]
  }

  // Overlay live values; create placeholder subblocks for values with no
  // matching structure entry.
  for (const [subblockId, value] of Object.entries(filteredSubBlockValues)) {
    const existing = mergedSubBlocks[subblockId]
    if (existing) {
      existing.value = value as SubBlockState['value']
    } else {
      mergedSubBlocks[subblockId] = {
        id: subblockId,
        type: 'short-input',
        value: value as SubBlockState['value'],
      }
    }
  }

  const block: BlockState = {
    id: newId,
    type: sourceBlock.type,
    name: newName,
    position: {
      x: sourceBlock.position.x + positionOffset.x,
      y: sourceBlock.position.y + positionOffset.y,
    },
    data: sourceBlock.data ? deepCopy(sourceBlock.data) : {},
    subBlocks: mergedSubBlocks,
    outputs: sourceBlock.outputs ? deepCopy(sourceBlock.outputs) : {},
    enabled: sourceBlock.enabled ?? true,
    horizontalHandles: sourceBlock.horizontalHandles ?? true,
    advancedMode: sourceBlock.advancedMode ?? false,
    triggerMode: sourceBlock.triggerMode ?? false,
    height: sourceBlock.height || 0,
  }

  return { block, subBlockValues: filteredSubBlockValues }
}
/**
* Merges workflow block states with subblock values while maintaining block structure
* @param blocks - Block configurations from workflow store
@@ -211,6 +393,217 @@ export async function mergeSubblockStateAsync(
})
)
// Convert entries back to an object
return Object.fromEntries(processedBlockEntries) as Record<string, BlockState>
}
/**
 * Rewrites block references of the form `<oldName.` to `<newName.` inside a
 * subblock value, recursing through arrays and plain objects. Non-string
 * leaves are returned unchanged.
 *
 * Fixes two latent defects in the previous implementation: the old name was
 * interpolated into a RegExp without escaping (a name containing `.`/`(`/etc.
 * could match unrelated text or throw), and the new name was used as a string
 * replacement pattern (sequences like `$&` or `$$` would be expanded).
 *
 * @param value - Subblock value: string, array, object, or other primitive
 * @param nameMap - Map of old normalized block name -> new normalized name
 * @returns The value with all block-name references rewritten
 */
function updateValueReferences(value: unknown, nameMap: Map<string, string>): unknown {
  if (typeof value === 'string') {
    let updatedValue = value
    nameMap.forEach((newName, oldName) => {
      // Escape regex metacharacters so the old name is matched literally.
      const escapedOldName = oldName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
      const regex = new RegExp(`<${escapedOldName}\\.`, 'g')
      // Replacer function keeps '$' sequences in the new name literal.
      updatedValue = updatedValue.replace(regex, () => `<${newName}.`)
    })
    return updatedValue
  }
  if (Array.isArray(value)) {
    return value.map((item) => updateValueReferences(item, nameMap))
  }
  if (value && typeof value === 'object') {
    const result: Record<string, unknown> = {}
    for (const [key, val] of Object.entries(value)) {
      result[key] = updateValueReferences(val, nameMap)
    }
    return result
  }
  return value
}
/**
 * Mutates `blocks` in place after an id regeneration: remaps `data.parentId`
 * through `idMap` (clearing parentId/extent when the parent was not part of
 * the copy), rewrites block-name references inside subblock values via
 * `nameMap`, and optionally nulls out trigger runtime subblock values.
 */
function updateBlockReferences(
  blocks: Record<string, BlockState>,
  idMap: Map<string, string>,
  nameMap: Map<string, string>,
  clearTriggerRuntimeValues = false
): void {
  for (const block of Object.values(blocks)) {
    const oldParentId = block.data?.parentId
    if (oldParentId) {
      const remappedParentId = idMap.get(oldParentId)
      block.data = remappedParentId
        ? { ...block.data, parentId: remappedParentId }
        : // Parent was not copied: detach the block from it entirely.
          { ...block.data, parentId: undefined, extent: undefined }
    }
    if (!block.subBlocks) continue
    for (const [subBlockId, subBlock] of Object.entries(block.subBlocks)) {
      if (clearTriggerRuntimeValues && TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(subBlockId)) {
        // Runtime trigger state must not be carried into the new block.
        block.subBlocks[subBlockId] = { ...subBlock, value: null }
        continue
      }
      if (subBlock.value !== undefined && subBlock.value !== null) {
        block.subBlocks[subBlockId] = {
          ...subBlock,
          value: updateValueReferences(subBlock.value, nameMap) as SubBlockState['value'],
        }
      }
    }
  }
}
export function regenerateWorkflowIds(
workflowState: WorkflowState,
options: { clearTriggerRuntimeValues?: boolean } = {}
): WorkflowState & { idMap: Map<string, string> } {
const { clearTriggerRuntimeValues = true } = options
const blockIdMap = new Map<string, string>()
const nameMap = new Map<string, string>()
const newBlocks: Record<string, BlockState> = {}
Object.entries(workflowState.blocks).forEach(([oldId, block]) => {
const newId = uuidv4()
blockIdMap.set(oldId, newId)
const oldNormalizedName = normalizeName(block.name)
nameMap.set(oldNormalizedName, oldNormalizedName)
newBlocks[newId] = { ...block, id: newId }
})
const newEdges = workflowState.edges.map((edge) => ({
...edge,
id: uuidv4(),
source: blockIdMap.get(edge.source) || edge.source,
target: blockIdMap.get(edge.target) || edge.target,
}))
const newLoops: Record<string, Loop> = {}
if (workflowState.loops) {
Object.entries(workflowState.loops).forEach(([oldLoopId, loop]) => {
const newLoopId = blockIdMap.get(oldLoopId) || oldLoopId
newLoops[newLoopId] = {
...loop,
id: newLoopId,
nodes: loop.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
}
})
}
const newParallels: Record<string, Parallel> = {}
if (workflowState.parallels) {
Object.entries(workflowState.parallels).forEach(([oldParallelId, parallel]) => {
const newParallelId = blockIdMap.get(oldParallelId) || oldParallelId
newParallels[newParallelId] = {
...parallel,
id: newParallelId,
nodes: parallel.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
}
})
}
updateBlockReferences(newBlocks, blockIdMap, nameMap, clearTriggerRuntimeValues)
return {
blocks: newBlocks,
edges: newEdges,
loops: newLoops,
parallels: newParallels,
metadata: workflowState.metadata,
variables: workflowState.variables,
idMap: blockIdMap,
}
}
/**
 * Regenerates ids for a copied subset of blocks so they can be pasted into a
 * workflow without collisions: assigns fresh UUIDs to blocks and edges, gives
 * each block a unique name, remaps loop/parallel containers, and rewrites
 * `<name.` references inside subblock values to the new names.
 *
 * Note: ordering matters here — names are accumulated into
 * `allBlocksForNaming` as blocks are processed so later copies don't collide
 * with earlier ones, and reference rewriting runs only after the full
 * id/name maps are built.
 *
 * @param blocks - Copied block states keyed by their original ids
 * @param edges - Copied edges (endpoints refer to original block ids)
 * @param loops - Copied loop containers keyed by original container ids
 * @param parallels - Copied parallel containers keyed by original ids
 * @param subBlockValues - Per-block subblock values keyed by original block id
 * @param positionOffset - Offset applied to top-level block positions
 * @param existingBlockNames - Blocks already in the target workflow, used for
 *   name-collision detection
 * @param uniqueNameFn - Produces a unique name from a base name and the
 *   blocks already taken
 * @returns Rewritten blocks/edges/containers/values plus old-id -> new-id map
 */
export function regenerateBlockIds(
  blocks: Record<string, BlockState>,
  edges: Edge[],
  loops: Record<string, Loop>,
  parallels: Record<string, Parallel>,
  subBlockValues: Record<string, Record<string, unknown>>,
  positionOffset: { x: number; y: number },
  existingBlockNames: Record<string, BlockState>,
  uniqueNameFn: (name: string, blocks: Record<string, BlockState>) => string
): RegeneratedState & { subBlockValues: Record<string, Record<string, unknown>> } {
  const blockIdMap = new Map<string, string>()
  const nameMap = new Map<string, string>()
  const newBlocks: Record<string, BlockState> = {}
  const newSubBlockValues: Record<string, Record<string, unknown>> = {}
  // Track all blocks for name uniqueness (existing + newly processed)
  const allBlocksForNaming = { ...existingBlockNames }
  Object.entries(blocks).forEach(([oldId, block]) => {
    const newId = uuidv4()
    blockIdMap.set(oldId, newId)
    const oldNormalizedName = normalizeName(block.name)
    const newName = uniqueNameFn(block.name, allBlocksForNaming)
    const newNormalizedName = normalizeName(newName)
    nameMap.set(oldNormalizedName, newNormalizedName)
    // Nested blocks keep their position relative to the parent container;
    // only top-level blocks get the paste offset.
    const isNested = !!block.data?.parentId
    const newBlock: BlockState = {
      ...block,
      id: newId,
      name: newName,
      position: isNested
        ? block.position
        : {
            x: block.position.x + positionOffset.x,
            y: block.position.y + positionOffset.y,
          },
    }
    newBlocks[newId] = newBlock
    // Add to tracking so next block gets unique name
    allBlocksForNaming[newId] = newBlock
    if (subBlockValues[oldId]) {
      newSubBlockValues[newId] = JSON.parse(JSON.stringify(subBlockValues[oldId]))
    }
  })
  const newEdges = edges.map((edge) => ({
    ...edge,
    id: uuidv4(),
    // Endpoints outside the copied set keep their original ids
    source: blockIdMap.get(edge.source) || edge.source,
    target: blockIdMap.get(edge.target) || edge.target,
  }))
  // Container ids are block ids, so remap them through the same map
  const newLoops: Record<string, Loop> = {}
  Object.entries(loops).forEach(([oldLoopId, loop]) => {
    const newLoopId = blockIdMap.get(oldLoopId) || oldLoopId
    newLoops[newLoopId] = {
      ...loop,
      id: newLoopId,
      nodes: loop.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
    }
  })
  const newParallels: Record<string, Parallel> = {}
  Object.entries(parallels).forEach(([oldParallelId, parallel]) => {
    const newParallelId = blockIdMap.get(oldParallelId) || oldParallelId
    newParallels[newParallelId] = {
      ...parallel,
      id: newParallelId,
      nodes: parallel.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
    }
  })
  // Remap parentIds and rewrite name references inside block subBlocks
  updateBlockReferences(newBlocks, blockIdMap, nameMap, false)
  // Also rewrite name references inside the standalone subblock value map
  Object.entries(newSubBlockValues).forEach(([_, blockValues]) => {
    Object.keys(blockValues).forEach((subBlockId) => {
      blockValues[subBlockId] = updateValueReferences(blockValues[subBlockId], nameMap)
    })
  })
  return {
    blocks: newBlocks,
    edges: newEdges,
    loops: newLoops,
    parallels: newParallels,
    subBlockValues: newSubBlockValues,
    idMap: blockIdMap,
  }
}