improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations (#2738)

* improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations

* feat(i18n): update translations (#2732)

Co-authored-by: icecrasher321 <icecrasher321@users.noreply.github.com>

* don't allow flip handles for subflows

* ack PR comments

* more

* fix missing handler

* remove dead subflow-specific ops

* remove unused code

* fixed subflow ops

* keep edges on subflow actions intact

* fix subflow resizing

* fix remove from subflow bulk

* improvement(canvas): add multi-block select, add batch handle, enabled, and edge operations

* don't allow flip handles for subflows

* ack PR comments

* more

* fix missing handler

* remove dead subflow-specific ops

* remove unused code

* fixed subflow ops

* fix subflow resizing

* keep edges on subflow actions intact

* fixed copy from inside subflow

* types improvement, preview fixes

* fetch variable data in deploy modal

* moved remove from subflow one position to the right

* fix subflow issues

* address greptile comment

* fix test

* improvement(preview): ui/ux

* fix(preview): subflows

* added batch add edges

* removed recovery

* use consolidated consts for sockets operations

* more

---------

Co-authored-by: icecrasher321 <icecrasher321@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Emir Karabeg <emirkarabeg@berkeley.edu>
This commit is contained in:
Waleed
2026-01-09 14:48:23 -08:00
committed by GitHub
parent 753600ed60
commit 05bbf34265
110 changed files with 5943 additions and 2418 deletions

View File

@@ -767,7 +767,7 @@ export default function PrivacyPolicy() {
privacy@sim.ai
</Link>
</li>
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94133, USA</li>
<li>Mailing Address: Sim, 80 Langton St, San Francisco, CA 94103, USA</li>
</ul>
<p>We will respond to your request within a reasonable timeframe.</p>
</section>

View File

@@ -42,6 +42,40 @@
animation: dash-animation 1.5s linear infinite !important;
}
/**
* React Flow selection box styling
* Uses brand-secondary color for selection highlighting
*/
.react-flow__selection {
background: rgba(51, 180, 255, 0.08) !important;
border: 1px solid var(--brand-secondary) !important;
}
.react-flow__nodesselection-rect,
.react-flow__nodesselection {
background: transparent !important;
border: none !important;
pointer-events: none !important;
}
/**
* Selected node ring indicator
* Uses a pseudo-element overlay to match the original behavior (absolute inset-0 z-40)
*/
.react-flow__node.selected > div > div {
position: relative;
}
.react-flow__node.selected > div > div::after {
content: "";
position: absolute;
inset: 0;
z-index: 40;
border-radius: 8px;
box-shadow: 0 0 0 1.75px var(--brand-secondary);
pointer-events: none;
}
/**
* Color tokens - single source of truth for all colors
* Light mode: Warm theme

View File

@@ -253,7 +253,7 @@ export async function POST(
userId: deployment.userId,
workspaceId,
isDeployed: workflowRecord?.isDeployed ?? false,
variables: workflowRecord?.variables || {},
variables: (workflowRecord?.variables as Record<string, unknown>) ?? undefined,
}
const stream = await createStreamingResponse({

View File

@@ -10,6 +10,7 @@ import {
extractRequiredCredentials,
sanitizeCredentials,
} from '@/lib/workflows/credentials/credential-extractor'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('TemplateByIdAPI')
@@ -189,12 +190,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.where(eq(workflow.id, template.workflowId))
.limit(1)
const currentState = {
const currentState: Partial<WorkflowState> = {
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
variables: workflowRecord?.variables || undefined,
variables: (workflowRecord?.variables as WorkflowState['variables']) ?? undefined,
lastSaved: Date.now(),
}

View File

@@ -7,7 +7,10 @@ import { v4 as uuidv4 } from 'uuid'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { regenerateWorkflowStateIds } from '@/lib/workflows/persistence/utils'
import {
type RegenerateStateInput,
regenerateWorkflowStateIds,
} from '@/lib/workflows/persistence/utils'
const logger = createLogger('TemplateUseAPI')
@@ -104,9 +107,10 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
// Step 2: Regenerate IDs when creating a copy (not when connecting/editing template)
// When connecting to template (edit mode), keep original IDs
// When using template (copy mode), regenerate all IDs to avoid conflicts
const templateState = templateData.state as RegenerateStateInput
const workflowState = connectToTemplate
? templateData.state
: regenerateWorkflowStateIds(templateData.state)
? templateState
: regenerateWorkflowStateIds(templateState)
// Step 3: Save the workflow state using the existing state endpoint (like imports do)
// Ensure variables in state are remapped for the new workflow as well

View File

@@ -243,7 +243,7 @@ export interface WorkflowExportState {
color?: string
exportedAt?: string
}
variables?: WorkflowVariable[]
variables?: Record<string, WorkflowVariable>
}
export interface WorkflowExportPayload {
@@ -317,36 +317,44 @@ export interface WorkspaceImportResponse {
// =============================================================================
/**
* Parse workflow variables from database JSON format to array format.
* Handles both array and Record<string, Variable> formats.
* Parse workflow variables from database JSON format to Record format.
* Handles both legacy Array and current Record<string, Variable> formats.
*/
export function parseWorkflowVariables(
dbVariables: DbWorkflow['variables']
): WorkflowVariable[] | undefined {
): Record<string, WorkflowVariable> | undefined {
if (!dbVariables) return undefined
try {
const varsObj = typeof dbVariables === 'string' ? JSON.parse(dbVariables) : dbVariables
// Handle legacy Array format by converting to Record
if (Array.isArray(varsObj)) {
return varsObj.map((v) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
const result: Record<string, WorkflowVariable> = {}
for (const v of varsObj) {
result[v.id] = {
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}
}
return result
}
// Already Record format - normalize and return
if (typeof varsObj === 'object' && varsObj !== null) {
return Object.values(varsObj).map((v: unknown) => {
const result: Record<string, WorkflowVariable> = {}
for (const [key, v] of Object.entries(varsObj)) {
const variable = v as { id: string; name: string; type: VariableType; value: unknown }
return {
result[key] = {
id: variable.id,
name: variable.name,
type: variable.type,
value: variable.value,
}
})
}
return result
}
} catch {
// pass

View File

@@ -207,9 +207,15 @@ describe('Workflow Variables API Route', () => {
update: { results: [{}] },
})
const variables = [
{ id: 'var-1', workflowId: 'workflow-123', name: 'test', type: 'string', value: 'hello' },
]
const variables = {
'var-1': {
id: 'var-1',
workflowId: 'workflow-123',
name: 'test',
type: 'string',
value: 'hello',
},
}
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', {
method: 'POST',
@@ -242,9 +248,15 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})
const variables = [
{ id: 'var-1', workflowId: 'workflow-123', name: 'test', type: 'string', value: 'hello' },
]
const variables = {
'var-1': {
id: 'var-1',
workflowId: 'workflow-123',
name: 'test',
type: 'string',
value: 'hello',
},
}
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', {
method: 'POST',
@@ -277,7 +289,6 @@ describe('Workflow Variables API Route', () => {
isWorkspaceOwner: false,
})
// Invalid data - missing required fields
const invalidData = { variables: [{ name: 'test' }] }
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123/variables', {

View File

@@ -11,16 +11,22 @@ import type { Variable } from '@/stores/panel/variables/types'
const logger = createLogger('WorkflowVariablesAPI')
const VariableSchema = z.object({
id: z.string(),
workflowId: z.string(),
name: z.string(),
type: z.enum(['string', 'number', 'boolean', 'object', 'array', 'plain']),
value: z.union([
z.string(),
z.number(),
z.boolean(),
z.record(z.unknown()),
z.array(z.unknown()),
]),
})
const VariablesSchema = z.object({
variables: z.array(
z.object({
id: z.string(),
workflowId: z.string(),
name: z.string(),
type: z.enum(['string', 'number', 'boolean', 'object', 'array', 'plain']),
value: z.union([z.string(), z.number(), z.boolean(), z.record(z.any()), z.array(z.any())]),
})
),
variables: z.record(z.string(), VariableSchema),
})
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
@@ -60,21 +66,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
try {
const { variables } = VariablesSchema.parse(body)
// Format variables for storage
const variablesRecord: Record<string, Variable> = {}
variables.forEach((variable) => {
variablesRecord[variable.id] = variable
})
// Replace variables completely with the incoming ones
// Variables are already in Record format - use directly
// The frontend is the source of truth for what variables should exist
const updatedVariables = variablesRecord
// Update workflow with variables
await db
.update(workflow)
.set({
variables: updatedVariables,
variables,
updatedAt: new Date(),
})
.where(eq(workflow.id, workflowId))
@@ -148,8 +145,9 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id:
headers,
}
)
} catch (error: any) {
} catch (error) {
logger.error(`[${requestId}] Workflow variables fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}
}

View File

@@ -332,7 +332,6 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
return (
<WorkflowPreview
workflowState={template.state}
showSubBlocks={true}
height='100%'
width='100%'
isPannable={true}

View File

@@ -204,7 +204,6 @@ function TemplateCardInner({
{normalizedState && isInView ? (
<WorkflowPreview
workflowState={normalizedState}
showSubBlocks={false}
height={180}
width='100%'
isPannable={false}

View File

@@ -0,0 +1 @@
export { SnapshotContextMenu } from './snapshot-context-menu'

View File

@@ -0,0 +1,97 @@
'use client'
import type { RefObject } from 'react'
import { createPortal } from 'react-dom'
import {
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
interface SnapshotContextMenuProps {
isOpen: boolean
position: { x: number; y: number }
menuRef: RefObject<HTMLDivElement | null>
onClose: () => void
onCopy: () => void
onSearch?: () => void
wrapText?: boolean
onToggleWrap?: () => void
/** When true, only shows Copy option (for subblock values) */
copyOnly?: boolean
}
/**
* Context menu for execution snapshot sidebar.
* Provides copy, search, and display options.
* Uses createPortal to render outside any transformed containers (like modals).
*/
export function SnapshotContextMenu({
isOpen,
position,
menuRef,
onClose,
onCopy,
onSearch,
wrapText,
onToggleWrap,
copyOnly = false,
}: SnapshotContextMenuProps) {
if (typeof document === 'undefined') return null
return createPortal(
<Popover
open={isOpen}
onOpenChange={onClose}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${position.x}px`,
top: `${position.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
<PopoverItem
onClick={() => {
onCopy()
onClose()
}}
>
Copy
</PopoverItem>
{!copyOnly && onSearch && (
<>
<PopoverDivider />
<PopoverItem
onClick={() => {
onSearch()
onClose()
}}
>
Search
</PopoverItem>
</>
)}
{!copyOnly && onToggleWrap && (
<>
<PopoverDivider />
<PopoverItem showCheck={wrapText} onClick={onToggleWrap}>
Wrap Text
</PopoverItem>
</>
)}
</PopoverContent>
</Popover>,
document.body
)
}

View File

@@ -1,12 +1,23 @@
'use client'
import { useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { AlertCircle, Loader2 } from 'lucide-react'
import { Modal, ModalBody, ModalContent, ModalHeader } from '@/components/emcn'
import { createPortal } from 'react-dom'
import {
Modal,
ModalBody,
ModalContent,
ModalHeader,
Popover,
PopoverAnchor,
PopoverContent,
PopoverItem,
} from '@/components/emcn'
import { redactApiKeys } from '@/lib/core/security/redaction'
import { cn } from '@/lib/core/utils/cn'
import {
BlockDetailsSidebar,
getLeftmostBlockId,
WorkflowPreview,
} from '@/app/workspace/[workspaceId]/w/components/preview'
import { useExecutionSnapshot } from '@/hooks/queries/logs'
@@ -60,6 +71,46 @@ export function ExecutionSnapshot({
}: ExecutionSnapshotProps) {
const { data, isLoading, error } = useExecutionSnapshot(executionId)
const [pinnedBlockId, setPinnedBlockId] = useState<string | null>(null)
const autoSelectedForExecutionRef = useRef<string | null>(null)
const [isMenuOpen, setIsMenuOpen] = useState(false)
const [menuPosition, setMenuPosition] = useState({ x: 0, y: 0 })
const [contextMenuBlockId, setContextMenuBlockId] = useState<string | null>(null)
const menuRef = useRef<HTMLDivElement>(null)
const closeMenu = useCallback(() => {
setIsMenuOpen(false)
setContextMenuBlockId(null)
}, [])
const handleCanvasContextMenu = useCallback((e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
setContextMenuBlockId(null)
setMenuPosition({ x: e.clientX, y: e.clientY })
setIsMenuOpen(true)
}, [])
const handleNodeContextMenu = useCallback(
(blockId: string, mousePosition: { x: number; y: number }) => {
setContextMenuBlockId(blockId)
setMenuPosition(mousePosition)
setIsMenuOpen(true)
},
[]
)
const handleCopyExecutionId = useCallback(() => {
navigator.clipboard.writeText(executionId)
closeMenu()
}, [executionId, closeMenu])
const handleOpenDetails = useCallback(() => {
if (contextMenuBlockId) {
setPinnedBlockId(contextMenuBlockId)
}
closeMenu()
}, [contextMenuBlockId, closeMenu])
const blockExecutions = useMemo(() => {
if (!traceSpans || !Array.isArray(traceSpans)) return {}
@@ -97,12 +148,21 @@ export function ExecutionSnapshot({
return blockExecutionMap
}, [traceSpans])
useEffect(() => {
setPinnedBlockId(null)
}, [executionId])
const workflowState = data?.workflowState as WorkflowState | undefined
// Auto-select the leftmost block once when data loads for a new executionId
useEffect(() => {
if (
workflowState &&
!isMigratedWorkflowState(workflowState) &&
autoSelectedForExecutionRef.current !== executionId
) {
autoSelectedForExecutionRef.current = executionId
const leftmostId = getLeftmostBlockId(workflowState)
setPinnedBlockId(leftmostId)
}
}, [executionId, workflowState])
const renderContent = () => {
if (isLoading) {
return (
@@ -169,22 +229,26 @@ export function ExecutionSnapshot({
<div
style={{ height, width }}
className={cn(
'flex overflow-hidden rounded-[4px] border border-[var(--border)]',
'flex overflow-hidden',
!isModal && 'rounded-[4px] border border-[var(--border)]',
className
)}
>
<div className='h-full flex-1'>
<div className='h-full flex-1' onContextMenu={handleCanvasContextMenu}>
<WorkflowPreview
workflowState={workflowState}
showSubBlocks={true}
isPannable={true}
defaultPosition={{ x: 0, y: 0 }}
defaultZoom={0.8}
onNodeClick={(blockId) => {
setPinnedBlockId((prev) => (prev === blockId ? null : blockId))
setPinnedBlockId(blockId)
}}
onNodeContextMenu={handleNodeContextMenu}
onPaneClick={() => setPinnedBlockId(null)}
cursorStyle='pointer'
executedBlocks={blockExecutions}
selectedBlockId={pinnedBlockId}
lightweight
/>
</div>
{pinnedBlockId && workflowState.blocks[pinnedBlockId] && (
@@ -193,32 +257,74 @@ export function ExecutionSnapshot({
executionData={blockExecutions[pinnedBlockId]}
allBlockExecutions={blockExecutions}
workflowBlocks={workflowState.blocks}
workflowVariables={workflowState.variables}
loops={workflowState.loops}
parallels={workflowState.parallels}
isExecutionMode
onClose={() => setPinnedBlockId(null)}
/>
)}
</div>
)
}
const canvasContextMenu =
typeof document !== 'undefined'
? createPortal(
<Popover
open={isMenuOpen}
onOpenChange={closeMenu}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${menuPosition.x}px`,
top: `${menuPosition.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{contextMenuBlockId && (
<PopoverItem onClick={handleOpenDetails}>Open Details</PopoverItem>
)}
<PopoverItem onClick={handleCopyExecutionId}>Copy Execution ID</PopoverItem>
</PopoverContent>
</Popover>,
document.body
)
: null
if (isModal) {
return (
<Modal
open={isOpen}
onOpenChange={(open) => {
if (!open) {
setPinnedBlockId(null)
onClose()
}
}}
>
<ModalContent size='full' className='flex h-[90vh] flex-col'>
<ModalHeader>Workflow State</ModalHeader>
<>
<Modal
open={isOpen}
onOpenChange={(open) => {
if (!open) {
setPinnedBlockId(null)
onClose()
}
}}
>
<ModalContent size='full' className='flex h-[90vh] flex-col'>
<ModalHeader>Workflow State</ModalHeader>
<ModalBody className='!p-0 min-h-0 flex-1'>{renderContent()}</ModalBody>
</ModalContent>
</Modal>
<ModalBody className='!p-0 min-h-0 flex-1 overflow-hidden'>{renderContent()}</ModalBody>
</ModalContent>
</Modal>
{canvasContextMenu}
</>
)
}
return renderContent()
return (
<>
{renderContent()}
{canvasContextMenu}
</>
)
}

View File

@@ -1,13 +1,27 @@
'use client'
import type React from 'react'
import { memo, useCallback, useMemo, useState } from 'react'
import { memo, useCallback, useMemo, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronDown, Code } from '@/components/emcn'
import { ArrowDown, ArrowUp, X } from 'lucide-react'
import { createPortal } from 'react-dom'
import {
Button,
ChevronDown,
Code,
Input,
Popover,
PopoverAnchor,
PopoverContent,
PopoverDivider,
PopoverItem,
} from '@/components/emcn'
import { WorkflowIcon } from '@/components/icons'
import { cn } from '@/lib/core/utils/cn'
import { LoopTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-config'
import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-config'
import { getBlock, getBlockByToolName } from '@/blocks'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import type { TraceSpan } from '@/stores/logs/filters/types'
interface TraceSpansProps {
@@ -370,7 +384,7 @@ function SpanContent({
}
/**
* Renders input/output section with collapsible content
* Renders input/output section with collapsible content, context menu, and search
*/
function InputOutputSection({
label,
@@ -391,14 +405,63 @@ function InputOutputSection({
}) {
const sectionKey = `${spanId}-${sectionType}`
const isExpanded = expandedSections.has(sectionKey)
const contentRef = useRef<HTMLDivElement>(null)
const menuRef = useRef<HTMLDivElement>(null)
// Context menu state
const [isContextMenuOpen, setIsContextMenuOpen] = useState(false)
const [contextMenuPosition, setContextMenuPosition] = useState({ x: 0, y: 0 })
// Code viewer features
const {
wrapText,
toggleWrapText,
isSearchActive,
searchQuery,
setSearchQuery,
matchCount,
currentMatchIndex,
activateSearch,
closeSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef,
} = useCodeViewerFeatures({ contentRef })
const jsonString = useMemo(() => {
if (!data) return ''
return JSON.stringify(data, null, 2)
}, [data])
const handleContextMenu = useCallback((e: React.MouseEvent) => {
e.preventDefault()
e.stopPropagation()
setContextMenuPosition({ x: e.clientX, y: e.clientY })
setIsContextMenuOpen(true)
}, [])
const closeContextMenu = useCallback(() => {
setIsContextMenuOpen(false)
}, [])
const handleCopy = useCallback(() => {
navigator.clipboard.writeText(jsonString)
closeContextMenu()
}, [jsonString, closeContextMenu])
const handleSearch = useCallback(() => {
activateSearch()
closeContextMenu()
}, [activateSearch, closeContextMenu])
const handleToggleWrap = useCallback(() => {
toggleWrapText()
closeContextMenu()
}, [toggleWrapText, closeContextMenu])
return (
<div className='flex min-w-0 flex-col gap-[8px] overflow-hidden'>
<div className='relative flex min-w-0 flex-col gap-[8px] overflow-hidden'>
<div
className='group flex cursor-pointer items-center justify-between'
onClick={() => onToggle(sectionKey)}
@@ -433,12 +496,101 @@ function InputOutputSection({
/>
</div>
{isExpanded && (
<Code.Viewer
code={jsonString}
language='json'
className='!bg-[var(--surface-3)] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
wrapText
/>
<>
<div ref={contentRef} onContextMenu={handleContextMenu}>
<Code.Viewer
code={jsonString}
language='json'
className='!bg-[var(--surface-3)] max-h-[300px] min-h-0 max-w-full rounded-[6px] border-0 [word-break:break-all]'
wrapText={wrapText}
searchQuery={isSearchActive ? searchQuery : undefined}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
/>
</div>
{/* Search Overlay */}
{isSearchActive && (
<div
className='absolute top-0 right-0 z-30 flex h-[34px] items-center gap-[6px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-1)] px-[6px] shadow-sm'
onClick={(e) => e.stopPropagation()}
>
<Input
ref={searchInputRef}
type='text'
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
placeholder='Search...'
className='mr-[2px] h-[23px] w-[94px] text-[12px]'
/>
<span
className={cn(
'min-w-[45px] text-center text-[11px]',
matchCount > 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]'
)}
>
{matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : '0/0'}
</span>
<Button
variant='ghost'
className='!p-1'
onClick={goToPreviousMatch}
disabled={matchCount === 0}
aria-label='Previous match'
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
className='!p-1'
onClick={goToNextMatch}
disabled={matchCount === 0}
aria-label='Next match'
>
<ArrowDown className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
className='!p-1'
onClick={closeSearch}
aria-label='Close search'
>
<X className='h-[12px] w-[12px]' />
</Button>
</div>
)}
{/* Context Menu - rendered in portal to avoid transform/overflow clipping */}
{typeof document !== 'undefined' &&
createPortal(
<Popover
open={isContextMenuOpen}
onOpenChange={closeContextMenu}
variant='secondary'
size='sm'
colorScheme='inverted'
>
<PopoverAnchor
style={{
position: 'fixed',
left: `${contextMenuPosition.x}px`,
top: `${contextMenuPosition.y}px`,
width: '1px',
height: '1px',
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
<PopoverItem onClick={handleCopy}>Copy</PopoverItem>
<PopoverDivider />
<PopoverItem onClick={handleSearch}>Search</PopoverItem>
<PopoverItem showCheck={wrapText} onClick={handleToggleWrap}>
Wrap Text
</PopoverItem>
</PopoverContent>
</Popover>,
document.body
)}
</>
)}
</div>
)

View File

@@ -87,7 +87,7 @@ export function LogRowContextMenu({
onClose()
}}
>
Open Preview
Open Snapshot
</PopoverItem>
{/* Filter actions */}

View File

@@ -210,7 +210,6 @@ function TemplateCardInner({
{normalizedState && isInView ? (
<WorkflowPreview
workflowState={normalizedState}
showSubBlocks={false}
height={180}
width='100%'
isPannable={false}

View File

@@ -45,7 +45,7 @@ import {
useFloatBoundarySync,
useFloatDrag,
useFloatResize,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-float'
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/float'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
import type { BlockLog, ExecutionResult } from '@/executor/types'
import { getChatPosition, useChatStore } from '@/stores/chat/store'
@@ -726,7 +726,9 @@ export function Chat() {
(e: KeyboardEvent<HTMLInputElement>) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault()
handleSendMessage()
if (!isStreaming && !isExecuting) {
handleSendMessage()
}
} else if (e.key === 'ArrowUp') {
e.preventDefault()
if (promptHistory.length > 0) {
@@ -749,7 +751,7 @@ export function Chat() {
}
}
},
[handleSendMessage, promptHistory, historyIndex]
[handleSendMessage, promptHistory, historyIndex, isStreaming, isExecuting]
)
/**
@@ -1061,7 +1063,7 @@ export function Chat() {
onKeyDown={handleKeyPress}
placeholder={isDragOver ? 'Drop files here...' : 'Type a message...'}
className='w-full border-0 bg-transparent pr-[56px] pl-[4px] shadow-none focus-visible:ring-0 focus-visible:ring-offset-0'
disabled={!activeWorkflowId || isExecuting}
disabled={!activeWorkflowId}
/>
{/* Buttons positioned absolutely on the right */}
@@ -1091,7 +1093,8 @@ export function Chat() {
disabled={
(!chatMessage.trim() && chatFiles.length === 0) ||
!activeWorkflowId ||
isExecuting
isExecuting ||
isStreaming
}
className={cn(
'h-[22px] w-[22px] rounded-full p-0 transition-colors',

View File

@@ -118,7 +118,7 @@ export function BlockContextMenu({
{getToggleEnabledLabel()}
</PopoverItem>
)}
{!allNoteBlocks && (
{!allNoteBlocks && !isSubflow && (
<PopoverItem
disabled={disableEdit}
onClick={() => {

View File

@@ -4,10 +4,12 @@ import { useState } from 'react'
import { Loader2 } from 'lucide-react'
import { Button } from '@/components/emcn'
import { canEditUsageLimit } from '@/lib/billing/subscriptions/utils'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { isHosted } from '@/lib/core/config/feature-flags'
import { useSubscriptionData, useUpdateUsageLimit } from '@/hooks/queries/subscription'
import { useCopilotStore } from '@/stores/panel/copilot/store'
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
const LIMIT_INCREMENTS = [0, 50, 100] as const
function roundUpToNearest50(value: number): number {
@@ -15,7 +17,7 @@ function roundUpToNearest50(value: number): number {
}
export function UsageLimitActions() {
const { data: subscriptionData } = useSubscriptionData()
const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled })
const updateUsageLimitMutation = useUpdateUsageLimit()
const subscription = subscriptionData?.data

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { Maximize2 } from 'lucide-react'
import {
@@ -17,6 +17,7 @@ import { Skeleton } from '@/components/ui'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import {
BlockDetailsSidebar,
getLeftmostBlockId,
WorkflowPreview,
} from '@/app/workspace/[workspaceId]/w/components/preview'
import { useDeploymentVersionState, useRevertToVersion } from '@/hooks/queries/workflows'
@@ -57,6 +58,7 @@ export function GeneralDeploy({
const [showPromoteDialog, setShowPromoteDialog] = useState(false)
const [showExpandedPreview, setShowExpandedPreview] = useState(false)
const [expandedSelectedBlockId, setExpandedSelectedBlockId] = useState<string | null>(null)
const hasAutoSelectedRef = useRef(false)
const [versionToLoad, setVersionToLoad] = useState<number | null>(null)
const [versionToPromote, setVersionToPromote] = useState<number | null>(null)
@@ -131,6 +133,19 @@ export function GeneralDeploy({
const hasDeployedData = deployedState && Object.keys(deployedState.blocks || {}).length > 0
const showLoadingSkeleton = isLoadingDeployedState && !hasDeployedData
// Auto-select the leftmost block once when expanded preview opens
useEffect(() => {
if (showExpandedPreview && workflowToShow && !hasAutoSelectedRef.current) {
hasAutoSelectedRef.current = true
const leftmostId = getLeftmostBlockId(workflowToShow)
setExpandedSelectedBlockId(leftmostId)
}
// Reset when modal closes
if (!showExpandedPreview) {
hasAutoSelectedRef.current = false
}
}, [showExpandedPreview, workflowToShow])
if (showLoadingSkeleton) {
return (
<div className='space-y-[12px]'>
@@ -186,7 +201,7 @@ export function GeneralDeploy({
</div>
<div
className='[&_*]:!cursor-default relative h-[260px] w-full cursor-default overflow-hidden rounded-[4px] border border-[var(--border)]'
className='relative h-[260px] w-full overflow-hidden rounded-[4px] border border-[var(--border)]'
onWheelCapture={(e) => {
if (e.ctrlKey || e.metaKey) return
e.stopPropagation()
@@ -194,28 +209,28 @@ export function GeneralDeploy({
>
{workflowToShow ? (
<>
<WorkflowPreview
workflowState={workflowToShow}
showSubBlocks={true}
height='100%'
width='100%'
isPannable={true}
defaultPosition={{ x: 0, y: 0 }}
defaultZoom={0.6}
/>
<div className='[&_*]:!cursor-default h-full w-full cursor-default'>
<WorkflowPreview
workflowState={workflowToShow}
height='100%'
width='100%'
isPannable={true}
defaultPosition={{ x: 0, y: 0 }}
defaultZoom={0.6}
/>
</div>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='default'
size='sm'
onClick={() => setShowExpandedPreview(true)}
className='absolute top-[8px] right-[8px] z-10'
className='absolute right-[8px] bottom-[8px] z-10 h-[28px] w-[28px] cursor-pointer border border-[var(--border)] bg-transparent p-0 backdrop-blur-sm hover:bg-[var(--surface-3)]'
>
<Maximize2 className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='bottom'>Expand preview</Tooltip.Content>
<Tooltip.Content side='top'>See preview</Tooltip.Content>
</Tooltip.Root>
</>
) : (
@@ -316,21 +331,23 @@ export function GeneralDeploy({
<div className='h-full flex-1'>
<WorkflowPreview
workflowState={workflowToShow}
showSubBlocks={true}
isPannable={true}
defaultPosition={{ x: 0, y: 0 }}
defaultZoom={0.6}
onNodeClick={(blockId) => {
setExpandedSelectedBlockId(
expandedSelectedBlockId === blockId ? null : blockId
)
setExpandedSelectedBlockId(blockId)
}}
cursorStyle='pointer'
onPaneClick={() => setExpandedSelectedBlockId(null)}
selectedBlockId={expandedSelectedBlockId}
lightweight
/>
</div>
{expandedSelectedBlockId && workflowToShow.blocks?.[expandedSelectedBlockId] && (
<BlockDetailsSidebar
block={workflowToShow.blocks[expandedSelectedBlockId]}
workflowVariables={workflowToShow.variables}
loops={workflowToShow.loops}
parallels={workflowToShow.parallels}
onClose={() => setExpandedSelectedBlockId(null)}
/>
)}

View File

@@ -488,7 +488,6 @@ const OGCaptureContainer = forwardRef<HTMLDivElement>((_, ref) => {
>
<WorkflowPreview
workflowState={workflowState}
showSubBlocks={false}
height='100%'
width='100%'
isPannable={false}
@@ -529,7 +528,6 @@ function TemplatePreviewContent({ existingTemplate }: TemplatePreviewContentProp
<WorkflowPreview
key={`template-preview-${existingTemplate.id}`}
workflowState={workflowState}
showSubBlocks={true}
height='100%'
width='100%'
isPannable={true}

View File

@@ -5,6 +5,7 @@ import { createLogger } from '@sim/logger'
import { ExternalLink, Users } from 'lucide-react'
import { Button, Combobox } from '@/components/emcn/components'
import { getSubscriptionStatus } from '@/lib/billing/client'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { getPollingProviderFromOAuth } from '@/lib/credential-sets/providers'
import {
getCanonicalScopesForProvider,
@@ -26,6 +27,7 @@ import { getMissingRequiredScopes } from '@/hooks/use-oauth-scope-status'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('CredentialSelector')
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
interface CredentialSelectorProps {
blockId: string
@@ -54,7 +56,7 @@ export function CredentialSelector({
const supportsCredentialSets = subBlock.supportsCredentialSets || false
const { data: organizationsData } = useOrganizations()
const { data: subscriptionData } = useSubscriptionData()
const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled })
const activeOrganization = organizationsData?.activeOrganization
const subscriptionStatus = getSubscriptionStatus(subscriptionData?.data)
const hasTeamPlan = subscriptionStatus.isTeam || subscriptionStatus.isEnterprise

View File

@@ -19,7 +19,9 @@ export function ScheduleInfo({ blockId, isPreview = false }: ScheduleInfoProps)
const params = useParams()
const workflowId = params.workflowId as string
const scheduleTimezone = useSubBlockStore((state) => state.getValue(blockId, 'timezone'))
const scheduleTimezone = useSubBlockStore((state) => state.getValue(blockId, 'timezone')) as
| string
| undefined
const { data: schedule, isLoading } = useScheduleQuery(workflowId, blockId, {
enabled: !isPreview,

View File

@@ -902,7 +902,22 @@ export function ToolInput({
const [draggedIndex, setDraggedIndex] = useState<number | null>(null)
const [dragOverIndex, setDragOverIndex] = useState<number | null>(null)
const [usageControlPopoverIndex, setUsageControlPopoverIndex] = useState<number | null>(null)
const { data: customTools = [] } = useCustomTools(workspaceId)
const value = isPreview ? previewValue : storeValue
const selectedTools: StoredTool[] =
Array.isArray(value) &&
value.length > 0 &&
value[0] !== null &&
typeof value[0]?.type === 'string'
? (value as StoredTool[])
: []
const hasReferenceOnlyCustomTools = selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId && !tool.code
)
const shouldFetchCustomTools = !isPreview || hasReferenceOnlyCustomTools
const { data: customTools = [] } = useCustomTools(shouldFetchCustomTools ? workspaceId : '')
const {
mcpTools,
@@ -918,24 +933,15 @@ export function ToolInput({
const mcpDataLoading = mcpLoading || mcpServersLoading
const hasRefreshedRef = useRef(false)
const value = isPreview ? previewValue : storeValue
const selectedTools: StoredTool[] =
Array.isArray(value) &&
value.length > 0 &&
value[0] !== null &&
typeof value[0]?.type === 'string'
? (value as StoredTool[])
: []
const hasMcpTools = selectedTools.some((tool) => tool.type === 'mcp')
useEffect(() => {
if (isPreview) return
if (hasMcpTools && !hasRefreshedRef.current) {
hasRefreshedRef.current = true
forceRefreshMcpTools(workspaceId)
}
}, [hasMcpTools, forceRefreshMcpTools, workspaceId])
}, [hasMcpTools, forceRefreshMcpTools, workspaceId, isPreview])
/**
* Returns issue info for an MCP tool.

View File

@@ -43,10 +43,12 @@ export function TriggerSave({
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [isGeneratingTestUrl, setIsGeneratingTestUrl] = useState(false)
const storedTestUrl = useSubBlockStore((state) => state.getValue(blockId, 'testUrl'))
const storedTestUrl = useSubBlockStore((state) => state.getValue(blockId, 'testUrl')) as
| string
| null
const storedTestUrlExpiresAt = useSubBlockStore((state) =>
state.getValue(blockId, 'testUrlExpiresAt')
)
) as string | null
const isTestUrlExpired = useMemo(() => {
if (!storedTestUrlExpiresAt) return true

View File

@@ -32,7 +32,6 @@ export function createDragPreview(info: DragItemInfo): HTMLElement {
z-index: 9999;
`
// Create icon container
const iconContainer = document.createElement('div')
iconContainer.style.cssText = `
width: 24px;
@@ -45,7 +44,6 @@ export function createDragPreview(info: DragItemInfo): HTMLElement {
flex-shrink: 0;
`
// Clone the actual icon if provided
if (info.iconElement) {
const clonedIcon = info.iconElement.cloneNode(true) as HTMLElement
clonedIcon.style.width = '16px'
@@ -55,11 +53,10 @@ export function createDragPreview(info: DragItemInfo): HTMLElement {
iconContainer.appendChild(clonedIcon)
}
// Create text element
const text = document.createElement('span')
text.textContent = info.name
text.style.cssText = `
color: #FFFFFF;
color: var(--text-primary);
font-size: 16px;
font-weight: 500;
white-space: nowrap;

View File

@@ -1,5 +1,8 @@
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { useSubscriptionData } from '@/hooks/queries/subscription'
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
/**
* Simplified hook that uses React Query for usage limits.
* Provides usage exceeded status from existing subscription data.
@@ -12,7 +15,7 @@ export function useUsageLimits(options?: {
}) {
// For now, we only support user context via React Query
// Organization context should use useOrganizationBilling directly
const { data: subscriptionData, isLoading } = useSubscriptionData()
const { data: subscriptionData, isLoading } = useSubscriptionData({ enabled: isBillingEnabled })
const usageExceeded = subscriptionData?.data?.usage?.isExceeded || false

View File

@@ -47,6 +47,8 @@ export interface SubflowNodeData {
parentId?: string
extent?: 'parent'
isPreview?: boolean
/** Whether this subflow is selected in preview mode */
isPreviewSelected?: boolean
kind: 'loop' | 'parallel'
name?: string
}
@@ -123,15 +125,17 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
return { top: `${HANDLE_POSITIONS.DEFAULT_Y_OFFSET}px`, transform: 'translateY(-50%)' }
}
const isPreviewSelected = data?.isPreviewSelected || false
/**
* Determine the ring styling based on subflow state priority:
* 1. Focused (selected in editor) - blue ring
* 1. Focused (selected in editor) or preview selected - blue ring
* 2. Diff status (version comparison) - green/orange ring
*/
const hasRing = isFocused || diffStatus === 'new' || diffStatus === 'edited'
const hasRing = isFocused || isPreviewSelected || diffStatus === 'new' || diffStatus === 'edited'
const ringStyles = cn(
hasRing && 'ring-[1.75px]',
isFocused && 'ring-[var(--brand-secondary)]',
(isFocused || isPreviewSelected) && 'ring-[var(--brand-secondary)]',
diffStatus === 'new' && 'ring-[#22C55F]',
diffStatus === 'edited' && 'ring-[var(--warning)]'
)

View File

@@ -31,6 +31,7 @@ interface LogRowContextMenuProps {
onFilterByBlock: (blockId: string) => void
onFilterByStatus: (status: 'error' | 'info') => void
onFilterByRunId: (runId: string) => void
onCopyRunId: (runId: string) => void
onClearFilters: () => void
onClearConsole: () => void
hasActiveFilters: boolean
@@ -50,6 +51,7 @@ export function LogRowContextMenu({
onFilterByBlock,
onFilterByStatus,
onFilterByRunId,
onCopyRunId,
onClearFilters,
onClearConsole,
hasActiveFilters,
@@ -79,18 +81,18 @@ export function LogRowContextMenu({
}}
/>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{/* Clear filters at top when active */}
{hasActiveFilters && (
{/* Copy actions */}
{entry && hasRunId && (
<>
<PopoverItem
onClick={() => {
onClearFilters()
onCopyRunId(entry.executionId!)
onClose()
}}
>
Clear All Filters
Copy Run ID
</PopoverItem>
{entry && <PopoverDivider />}
<PopoverDivider />
</>
)}
@@ -129,6 +131,18 @@ export function LogRowContextMenu({
</>
)}
{/* Clear filters */}
{hasActiveFilters && (
<PopoverItem
onClick={() => {
onClearFilters()
onClose()
}}
>
Clear All Filters
</PopoverItem>
)}
{/* Destructive action */}
{(entry || hasActiveFilters) && <PopoverDivider />}
<PopoverItem

View File

@@ -49,6 +49,7 @@ import {
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/hooks'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { getBlock } from '@/blocks'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import { OUTPUT_PANEL_WIDTH, TERMINAL_HEIGHT } from '@/stores/constants'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
import { useGeneralStore } from '@/stores/settings/general/store'
@@ -337,27 +338,34 @@ export function Terminal() {
const [mainOptionsOpen, setMainOptionsOpen] = useState(false)
const [outputOptionsOpen, setOutputOptionsOpen] = useState(false)
// Output panel search state
const [isOutputSearchActive, setIsOutputSearchActive] = useState(false)
const [outputSearchQuery, setOutputSearchQuery] = useState('')
const [matchCount, setMatchCount] = useState(0)
const [currentMatchIndex, setCurrentMatchIndex] = useState(0)
const outputSearchInputRef = useRef<HTMLInputElement>(null)
const outputContentRef = useRef<HTMLDivElement>(null)
const {
isSearchActive: isOutputSearchActive,
searchQuery: outputSearchQuery,
setSearchQuery: setOutputSearchQuery,
matchCount,
currentMatchIndex,
activateSearch: activateOutputSearch,
closeSearch: closeOutputSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef: outputSearchInputRef,
} = useCodeViewerFeatures({
contentRef: outputContentRef,
externalWrapText: wrapText,
onWrapTextChange: setWrapText,
})
// Training controls state
const [isTrainingEnvEnabled, setIsTrainingEnvEnabled] = useState(false)
const showTrainingControls = useGeneralStore((state) => state.showTrainingControls)
const { isTraining, toggleModal: toggleTrainingModal, stopTraining } = useCopilotTrainingStore()
// Playground state
const [isPlaygroundEnabled, setIsPlaygroundEnabled] = useState(false)
// Terminal resize hooks
const { handleMouseDown } = useTerminalResize()
const { handleMouseDown: handleOutputPanelResizeMouseDown } = useOutputPanelResize()
// Terminal filters hook
const {
filters,
sortConfig,
@@ -370,12 +378,10 @@ export function Terminal() {
hasActiveFilters,
} = useTerminalFilters()
// Context menu state
const [hasSelection, setHasSelection] = useState(false)
const [contextMenuEntry, setContextMenuEntry] = useState<ConsoleEntry | null>(null)
const [storedSelectionText, setStoredSelectionText] = useState('')
// Context menu hooks
const {
isOpen: isLogRowMenuOpen,
position: logRowMenuPosition,
@@ -577,44 +583,6 @@ export function Terminal() {
}
}, [activeWorkflowId, clearWorkflowConsole])
const activateOutputSearch = useCallback(() => {
setIsOutputSearchActive(true)
setTimeout(() => {
outputSearchInputRef.current?.focus()
}, 0)
}, [])
const closeOutputSearch = useCallback(() => {
setIsOutputSearchActive(false)
setOutputSearchQuery('')
setMatchCount(0)
setCurrentMatchIndex(0)
}, [])
/**
* Navigates to the next match in the search results.
*/
const goToNextMatch = useCallback(() => {
if (matchCount === 0) return
setCurrentMatchIndex((prev) => (prev + 1) % matchCount)
}, [matchCount])
/**
* Navigates to the previous match in the search results.
*/
const goToPreviousMatch = useCallback(() => {
if (matchCount === 0) return
setCurrentMatchIndex((prev) => (prev - 1 + matchCount) % matchCount)
}, [matchCount])
/**
* Handles match count change from Code.Viewer.
*/
const handleMatchCountChange = useCallback((count: number) => {
setMatchCount(count)
setCurrentMatchIndex(0)
}, [])
const handleClearConsole = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
@@ -683,6 +651,14 @@ export function Terminal() {
[toggleRunId, closeLogRowMenu]
)
const handleCopyRunId = useCallback(
(runId: string) => {
navigator.clipboard.writeText(runId)
closeLogRowMenu()
},
[closeLogRowMenu]
)
const handleClearConsoleFromMenu = useCallback(() => {
clearCurrentWorkflowConsole()
}, [clearCurrentWorkflowConsole])
@@ -885,66 +861,20 @@ export function Terminal() {
}, [expandToLastHeight, selectedEntry, showInput, hasInputData, isExpanded])
/**
* Handle Escape to close search or unselect entry
* Handle Escape to unselect entry (search close is handled by useCodeViewerFeatures)
*/
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Escape') {
if (e.key === 'Escape' && !isOutputSearchActive && selectedEntry) {
e.preventDefault()
// First close search if active
if (isOutputSearchActive) {
closeOutputSearch()
return
}
// Then unselect entry
if (selectedEntry) {
setSelectedEntry(null)
setAutoSelectEnabled(true)
}
setSelectedEntry(null)
setAutoSelectEnabled(true)
}
}
window.addEventListener('keydown', handleKeyDown)
return () => window.removeEventListener('keydown', handleKeyDown)
}, [selectedEntry, isOutputSearchActive, closeOutputSearch])
/**
* Handle Enter/Shift+Enter for search navigation when search input is focused
*/
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (!isOutputSearchActive) return
const isSearchInputFocused = document.activeElement === outputSearchInputRef.current
if (e.key === 'Enter' && isSearchInputFocused && matchCount > 0) {
e.preventDefault()
if (e.shiftKey) {
goToPreviousMatch()
} else {
goToNextMatch()
}
}
}
window.addEventListener('keydown', handleKeyDown)
return () => window.removeEventListener('keydown', handleKeyDown)
}, [isOutputSearchActive, matchCount, goToNextMatch, goToPreviousMatch])
/**
* Scroll to current match when it changes
*/
useEffect(() => {
if (!isOutputSearchActive || matchCount === 0 || !outputContentRef.current) return
// Find all match elements and scroll to the current one
const matchElements = outputContentRef.current.querySelectorAll('[data-search-match]')
const currentElement = matchElements[currentMatchIndex]
if (currentElement) {
currentElement.scrollIntoView({ block: 'center' })
}
}, [currentMatchIndex, isOutputSearchActive, matchCount])
}, [selectedEntry, isOutputSearchActive])
/**
* Adjust output panel width when sidebar or panel width changes.
@@ -1414,25 +1344,16 @@ export function Terminal() {
</div>
{/* Run ID */}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<span
className={clsx(
COLUMN_WIDTHS.RUN_ID,
COLUMN_BASE_CLASS,
'truncate font-medium font-mono text-[12px]'
)}
style={{ color: runIdColor?.text || '#D2D2D2' }}
>
{formatRunId(entry.executionId)}
</span>
</Tooltip.Trigger>
{entry.executionId && (
<Tooltip.Content>
<span className='font-mono text-[11px]'>{entry.executionId}</span>
</Tooltip.Content>
<span
className={clsx(
COLUMN_WIDTHS.RUN_ID,
COLUMN_BASE_CLASS,
'truncate font-medium font-mono text-[12px]'
)}
</Tooltip.Root>
style={{ color: runIdColor?.text || '#D2D2D2' }}
>
{formatRunId(entry.executionId)}
</span>
{/* Duration */}
<span
@@ -1489,9 +1410,7 @@ export function Terminal() {
variant='ghost'
className={clsx(
'px-[8px] py-[6px] text-[12px]',
!showInput &&
hasInputData &&
'!text-[var(--text-primary)] dark:!text-[var(--text-primary)]'
!showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
)}
onClick={(e) => {
e.stopPropagation()
@@ -1509,7 +1428,7 @@ export function Terminal() {
variant='ghost'
className={clsx(
'px-[8px] py-[6px] text-[12px]',
showInput && '!text-[var(--text-primary)]'
showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
)}
onClick={(e) => {
e.stopPropagation()
@@ -1839,6 +1758,7 @@ export function Terminal() {
onFilterByBlock={handleFilterByBlock}
onFilterByStatus={handleFilterByStatus}
onFilterByRunId={handleFilterByRunId}
onCopyRunId={handleCopyRunId}
onClearFilters={() => {
clearFilters()
closeLogRowMenu()

View File

@@ -34,8 +34,8 @@ export const ActionBar = memo(
const {
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeToggleBlockEnabled,
collaborativeToggleBlockHandles,
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
} = useCollaborativeWorkflow()
const { activeWorkflowId } = useWorkflowRegistry()
const blocks = useWorkflowStore((state) => state.blocks)
@@ -121,7 +121,7 @@ export const ActionBar = memo(
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeToggleBlockEnabled(blockId)
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
@@ -161,29 +161,6 @@ export const ActionBar = memo(
</Tooltip.Root>
)}
{!isStartBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled && userPermissions.canEdit) {
window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockId } })
)
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className='h-[11px] w-[11px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
</Tooltip.Root>
)}
{!isNoteBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
@@ -192,7 +169,7 @@ export const ActionBar = memo(
onClick={(e) => {
e.stopPropagation()
if (!disabled) {
collaborativeToggleBlockHandles(blockId)
collaborativeBatchToggleBlockHandles([blockId])
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
@@ -211,6 +188,29 @@ export const ActionBar = memo(
</Tooltip.Root>
)}
{!isStartBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled && userPermissions.canEdit) {
window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
)
}
}}
className='hover:!text-[var(--text-inverse)] h-[23px] w-[23px] rounded-[8px] bg-[var(--surface-7)] p-0 text-[var(--text-secondary)] hover:bg-[var(--brand-secondary)]'
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className='h-[11px] w-[11px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
</Tooltip.Root>
)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button

View File

@@ -54,9 +54,11 @@ export function useWebhookInfo(blockId: string, workflowId: string): UseWebhookI
useCallback(
(state) => {
if (!activeWorkflowId) return undefined
return state.workflowValues[activeWorkflowId]?.[blockId]?.webhookProvider?.value as
| string
| undefined
const value = state.workflowValues[activeWorkflowId]?.[blockId]?.webhookProvider
if (typeof value === 'object' && value !== null && 'value' in value) {
return (value as { value?: unknown }).value as string | undefined
}
return value as string | undefined
},
[activeWorkflowId, blockId]
)

View File

@@ -10,6 +10,8 @@ export interface WorkflowBlockProps {
isActive?: boolean
isPending?: boolean
isPreview?: boolean
/** Whether this block is selected in preview mode */
isPreviewSelected?: boolean
subBlockValues?: Record<string, any>
blockState?: any
}

View File

@@ -32,6 +32,7 @@ export function shouldSkipBlockRender(
prevProps.data.isActive === nextProps.data.isActive &&
prevProps.data.isPending === nextProps.data.isPending &&
prevProps.data.isPreview === nextProps.data.isPreview &&
prevProps.data.isPreviewSelected === nextProps.data.isPreviewSelected &&
prevProps.data.config === nextProps.data.config &&
prevProps.data.subBlockValues === nextProps.data.subBlockValues &&
prevProps.data.blockState === nextProps.data.blockState &&

View File

@@ -624,7 +624,11 @@ export const WorkflowBlock = memo(function WorkflowBlock({
if (!activeWorkflowId) return
const current = useSubBlockStore.getState().workflowValues[activeWorkflowId]?.[id]
if (!current) return
const cred = current.credential?.value as string | undefined
const credValue = current.credential
const cred =
typeof credValue === 'object' && credValue !== null && 'value' in credValue
? ((credValue as { value?: unknown }).value as string | undefined)
: (credValue as string | undefined)
if (prevCredRef.current !== cred) {
prevCredRef.current = cred
const keys = Object.keys(current)

View File

@@ -40,10 +40,7 @@ const WorkflowEdgeComponent = ({
})
const isSelected = data?.isSelected ?? false
const isInsideLoop = data?.isInsideLoop ?? false
const parentLoopId = data?.parentLoopId
// Combined store subscription to reduce subscription overhead
const { diffAnalysis, isShowingDiff, isDiffReady } = useWorkflowDiffStore(
useShallow((state) => ({
diffAnalysis: state.diffAnalysis,
@@ -98,7 +95,8 @@ const WorkflowEdgeComponent = ({
} else if (edgeDiffStatus === 'new') {
color = 'var(--brand-tertiary)'
} else if (edgeRunStatus === 'success') {
color = 'var(--border-success)'
// Use green for preview mode, default for canvas execution
color = previewExecutionStatus ? 'var(--brand-tertiary-2)' : 'var(--border-success)'
} else if (edgeRunStatus === 'error') {
color = 'var(--text-error)'
}
@@ -120,34 +118,18 @@ const WorkflowEdgeComponent = ({
strokeDasharray: edgeDiffStatus === 'deleted' ? '10,5' : undefined,
opacity,
}
}, [style, edgeDiffStatus, isSelected, isErrorEdge, edgeRunStatus])
}, [style, edgeDiffStatus, isSelected, isErrorEdge, edgeRunStatus, previewExecutionStatus])
return (
<>
<BaseEdge
path={edgePath}
data-testid='workflow-edge'
style={edgeStyle}
interactionWidth={30}
data-edge-id={id}
data-parent-loop-id={parentLoopId}
data-is-selected={isSelected ? 'true' : 'false'}
data-is-inside-loop={isInsideLoop ? 'true' : 'false'}
/>
{/* Animate dash offset for edge movement effect */}
<animate
attributeName='stroke-dashoffset'
from={edgeDiffStatus === 'deleted' ? '15' : '10'}
to='0'
dur={edgeDiffStatus === 'deleted' ? '2s' : '1s'}
repeatCount='indefinite'
/>
<BaseEdge path={edgePath} style={edgeStyle} interactionWidth={30} />
{isSelected && (
<EdgeLabelRenderer>
<div
className='nodrag nopan group flex h-[22px] w-[22px] cursor-pointer items-center justify-center transition-colors'
style={{
position: 'absolute',
transform: `translate(-50%, -50%) translate(${labelX}px,${labelY}px)`,
pointerEvents: 'all',
zIndex: 100,

View File

@@ -1,8 +1,17 @@
export {
clearDragHighlights,
computeClampedPositionUpdates,
computeParentUpdateEntries,
getClampedPositionForNode,
isInEditableElement,
selectNodesDeferred,
validateTriggerPaste,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/workflow-canvas-helpers'
export { useFloatBoundarySync, useFloatDrag, useFloatResize } from './float'
export { useAutoLayout } from './use-auto-layout'
export { BLOCK_DIMENSIONS, useBlockDimensions } from './use-block-dimensions'
export { useBlockVisual } from './use-block-visual'
export { type CurrentWorkflow, useCurrentWorkflow } from './use-current-workflow'
export { useFloatBoundarySync, useFloatDrag, useFloatResize } from './use-float'
export { useNodeUtilities } from './use-node-utilities'
export { usePreventZoom } from './use-prevent-zoom'
export { useScrollManagement } from './use-scroll-management'

View File

@@ -21,14 +21,15 @@ interface UseBlockVisualProps {
/**
* Provides visual state and interaction handlers for workflow blocks.
* Computes ring styling based on execution, focus, diff, and run path states.
* In preview mode, all interactive and execution-related visual states are disabled.
* Computes ring styling based on execution, diff, deletion, and run path states.
* In preview mode, uses isPreviewSelected for selection highlighting.
*
* @param props - The hook properties
* @returns Visual state, click handler, and ring styling for the block
*/
export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVisualProps) {
const isPreview = data.isPreview ?? false
const isPreviewSelected = data.isPreviewSelected ?? false
const currentWorkflow = useCurrentWorkflow()
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -40,14 +41,13 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis
isDeletedBlock,
} = useBlockState(blockId, currentWorkflow, data)
const isActive = isPreview ? false : blockIsActive
// In preview mode, use isPreviewSelected for selection state
const isActive = isPreview ? isPreviewSelected : blockIsActive
const lastRunPath = useExecutionStore((state) => state.lastRunPath)
const runPathStatus = isPreview ? undefined : lastRunPath.get(blockId)
const setCurrentBlockId = usePanelEditorStore((state) => state.setCurrentBlockId)
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
const isFocused = isPreview ? false : currentBlockId === blockId
const handleClick = useCallback(() => {
if (!isPreview) {
@@ -60,12 +60,12 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis
getBlockRingStyles({
isActive,
isPending: isPreview ? false : isPending,
isFocused,
isDeletedBlock: isPreview ? false : isDeletedBlock,
diffStatus: isPreview ? undefined : diffStatus,
runPathStatus,
isPreviewSelection: isPreview && isPreviewSelected,
}),
[isActive, isPending, isFocused, isDeletedBlock, diffStatus, runPathStatus, isPreview]
[isActive, isPending, isDeletedBlock, diffStatus, runPathStatus, isPreview, isPreviewSelected]
)
return {

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { useReactFlow } from 'reactflow'
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import { getBlock } from '@/blocks/registry'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('NodeUtilities')
@@ -208,28 +209,30 @@ export function useNodeUtilities(blocks: Record<string, any>) {
* to the content area bounds (after header and padding).
* @param nodeId ID of the node being repositioned
* @param newParentId ID of the new parent
* @param skipClamping If true, returns raw relative position without clamping to container bounds
* @returns Relative position coordinates {x, y} within the parent
*/
const calculateRelativePosition = useCallback(
(nodeId: string, newParentId: string): { x: number; y: number } => {
(nodeId: string, newParentId: string, skipClamping?: boolean): { x: number; y: number } => {
const nodeAbsPos = getNodeAbsolutePosition(nodeId)
const parentAbsPos = getNodeAbsolutePosition(newParentId)
const parentNode = getNodes().find((n) => n.id === newParentId)
// Calculate raw relative position (relative to parent origin)
const rawPosition = {
x: nodeAbsPos.x - parentAbsPos.x,
y: nodeAbsPos.y - parentAbsPos.y,
}
// Get container and block dimensions
if (skipClamping) {
return rawPosition
}
const parentNode = getNodes().find((n) => n.id === newParentId)
const containerDimensions = {
width: parentNode?.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
height: parentNode?.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
}
const blockDimensions = getBlockDimensions(nodeId)
// Clamp position to keep block inside content area
return clampPositionToContainer(rawPosition, containerDimensions, blockDimensions)
},
[getNodeAbsolutePosition, getNodes, getBlockDimensions]
@@ -298,12 +301,12 @@ export function useNodeUtilities(blocks: Record<string, any>) {
*/
const calculateLoopDimensions = useCallback(
(nodeId: string): { width: number; height: number } => {
// Check both React Flow's node.parentId AND blocks store's data.parentId
// This ensures we catch children even if React Flow hasn't re-rendered yet
const childNodes = getNodes().filter(
(node) => node.parentId === nodeId || blocks[node.id]?.data?.parentId === nodeId
const currentBlocks = useWorkflowStore.getState().blocks
const childBlockIds = Object.keys(currentBlocks).filter(
(id) => currentBlocks[id]?.data?.parentId === nodeId
)
if (childNodes.length === 0) {
if (childBlockIds.length === 0) {
return {
width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
@@ -313,30 +316,28 @@ export function useNodeUtilities(blocks: Record<string, any>) {
let maxRight = 0
let maxBottom = 0
childNodes.forEach((node) => {
const { width: nodeWidth, height: nodeHeight } = getBlockDimensions(node.id)
// Use block position from store if available (more up-to-date)
const block = blocks[node.id]
const position = block?.position || node.position
maxRight = Math.max(maxRight, position.x + nodeWidth)
maxBottom = Math.max(maxBottom, position.y + nodeHeight)
})
for (const childId of childBlockIds) {
const child = currentBlocks[childId]
if (!child?.position) continue
const { width: childWidth, height: childHeight } = getBlockDimensions(childId)
maxRight = Math.max(maxRight, child.position.x + childWidth)
maxBottom = Math.max(maxBottom, child.position.y + childHeight)
}
const width = Math.max(
CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
CONTAINER_DIMENSIONS.LEFT_PADDING + maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
)
const height = Math.max(
CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
CONTAINER_DIMENSIONS.HEADER_HEIGHT +
CONTAINER_DIMENSIONS.TOP_PADDING +
maxBottom +
CONTAINER_DIMENSIONS.BOTTOM_PADDING
maxBottom + CONTAINER_DIMENSIONS.BOTTOM_PADDING
)
return { width, height }
},
[getNodes, getBlockDimensions, blocks]
[getBlockDimensions]
)
/**
@@ -345,29 +346,27 @@ export function useNodeUtilities(blocks: Record<string, any>) {
*/
const resizeLoopNodes = useCallback(
(updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => void) => {
const containerNodes = getNodes()
.filter((node) => node.type && isContainerType(node.type))
.map((node) => ({
...node,
depth: getNodeDepth(node.id),
const currentBlocks = useWorkflowStore.getState().blocks
const containerBlocks = Object.entries(currentBlocks)
.filter(([, block]) => block?.type && isContainerType(block.type))
.map(([id, block]) => ({
id,
block,
depth: getNodeDepth(id),
}))
// Sort by depth descending - process innermost containers first
// so their dimensions are correct when outer containers calculate sizes
.sort((a, b) => b.depth - a.depth)
containerNodes.forEach((node) => {
const dimensions = calculateLoopDimensions(node.id)
// Get current dimensions from the blocks store rather than React Flow's potentially stale state
const currentWidth = blocks[node.id]?.data?.width
const currentHeight = blocks[node.id]?.data?.height
for (const { id, block } of containerBlocks) {
const dimensions = calculateLoopDimensions(id)
const currentWidth = block?.data?.width
const currentHeight = block?.data?.height
// Only update if dimensions actually changed to avoid unnecessary re-renders
if (dimensions.width !== currentWidth || dimensions.height !== currentHeight) {
updateNodeDimensions(node.id, dimensions)
updateNodeDimensions(id, dimensions)
}
})
}
},
[getNodes, isContainerType, getNodeDepth, calculateLoopDimensions, blocks]
[isContainerType, getNodeDepth, calculateLoopDimensions]
)
/**

View File

@@ -7,66 +7,64 @@ export type BlockRunPathStatus = 'success' | 'error' | undefined
export interface BlockRingOptions {
isActive: boolean
isPending: boolean
isFocused: boolean
isDeletedBlock: boolean
diffStatus: BlockDiffStatus
runPathStatus: BlockRunPathStatus
isPreviewSelection?: boolean
}
/**
* Derives visual ring visibility and class names for workflow blocks
* based on execution, focus, diff, deletion, and run-path states.
* based on execution, diff, deletion, and run-path states.
*/
export function getBlockRingStyles(options: BlockRingOptions): {
hasRing: boolean
ringClassName: string
} {
const { isActive, isPending, isFocused, isDeletedBlock, diffStatus, runPathStatus } = options
const { isActive, isPending, isDeletedBlock, diffStatus, runPathStatus, isPreviewSelection } =
options
const hasRing =
isActive ||
isPending ||
isFocused ||
diffStatus === 'new' ||
diffStatus === 'edited' ||
isDeletedBlock ||
!!runPathStatus
const ringClassName = cn(
// Preview selection: static blue ring (standard thickness, no animation)
isActive && isPreviewSelection && 'ring-[1.75px] ring-[var(--brand-secondary)]',
// Executing block: pulsing success ring with prominent thickness
isActive && 'ring-[3.5px] ring-[var(--border-success)] animate-ring-pulse',
isActive &&
!isPreviewSelection &&
'ring-[3.5px] ring-[var(--border-success)] animate-ring-pulse',
// Non-active states use standard ring utilities
!isActive && hasRing && 'ring-[1.75px]',
// Pending state: warning ring
!isActive && isPending && 'ring-[var(--warning)]',
// Focused (selected) state: brand ring
!isActive && !isPending && isFocused && 'ring-[var(--brand-secondary)]',
// Deleted state (highest priority after active/pending/focused)
!isActive && !isPending && !isFocused && isDeletedBlock && 'ring-[var(--text-error)]',
// Deleted state (highest priority after active/pending)
!isActive && !isPending && isDeletedBlock && 'ring-[var(--text-error)]',
// Diff states
!isActive &&
!isPending &&
!isFocused &&
!isDeletedBlock &&
diffStatus === 'new' &&
'ring-[var(--brand-tertiary)]',
!isActive &&
!isPending &&
!isFocused &&
!isDeletedBlock &&
diffStatus === 'edited' &&
'ring-[var(--warning)]',
// Run path states (lowest priority - only show if no other states active)
!isActive &&
!isPending &&
!isFocused &&
!isDeletedBlock &&
!diffStatus &&
runPathStatus === 'success' &&
'ring-[var(--border-success)]',
!isActive &&
!isPending &&
!isFocused &&
!isDeletedBlock &&
!diffStatus &&
runPathStatus === 'error' &&

View File

@@ -0,0 +1,181 @@
import type { Edge, Node } from 'reactflow'
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { clampPositionToContainer } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities'
import type { BlockState } from '@/stores/workflows/workflow/types'
/**
 * Determines whether keyboard focus currently sits inside an editable element
 * (an input, a textarea, or any element carrying the contenteditable
 * attribute). Used to suppress canvas keyboard shortcuts while typing.
 */
export function isInEditableElement(): boolean {
  const focused = document.activeElement
  if (focused instanceof HTMLInputElement) return true
  if (focused instanceof HTMLTextAreaElement) return true
  return focused?.hasAttribute('contenteditable') === true
}
/** Result of a trigger paste/duplicate validation check. */
interface TriggerValidationResult {
  isValid: boolean
  // Human-readable reason when isValid is false
  message?: string
}
/**
 * Validates that pasting/duplicating trigger blocks won't violate constraints.
 * Stops at the first offending block and reports why; otherwise the operation
 * is allowed.
 *
 * @param blocksToAdd - Blocks about to be inserted (only `type` is inspected)
 * @param existingBlocks - Blocks already present in the workflow
 * @param action - Which user action is being validated
 * @returns Validation result with an error message when invalid
 */
export function validateTriggerPaste(
  blocksToAdd: Array<{ type: string }>,
  existingBlocks: Record<string, BlockState>,
  action: 'paste' | 'duplicate'
): TriggerValidationResult {
  for (const candidate of blocksToAdd) {
    if (!TriggerUtils.isAnyTriggerType(candidate.type)) continue
    const issue = TriggerUtils.getTriggerAdditionIssue(existingBlocks, candidate.type)
    if (!issue) continue
    const actionText = action === 'paste' ? 'paste' : 'duplicate'
    if (issue.issue === 'legacy') {
      return {
        isValid: false,
        message: `Cannot ${actionText} trigger blocks when a legacy Start block exists.`,
      }
    }
    const suffix =
      action === 'paste' ? 'Please remove the existing one before pasting.' : 'Cannot duplicate.'
    return {
      isValid: false,
      message: `A workflow can only have one ${issue.triggerName} trigger block. ${suffix}`,
    }
  }
  return { isValid: true }
}
/**
 * Clears drag highlight classes and resets cursor state.
 * Called whenever a drag operation completes or is cancelled.
 */
export function clearDragHighlights(): void {
  const highlightClasses = ['loop-node-drag-over', 'parallel-node-drag-over']
  const highlighted = document.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over')
  for (const el of highlighted) {
    el.classList.remove(...highlightClasses)
  }
  document.body.style.cursor = ''
}
/**
 * Selects nodes by their IDs after paste/duplicate operations.
 * Selection is deferred to the next animation frame so displayNodes can sync
 * from the store first: the component renders controlled state
 * (nodes={displayNodes}), and freshly added blocks need one cycle to travel
 * through store → derivedNodes → displayNodes before they can be selected.
 *
 * @param nodeIds - IDs of the nodes that should end up selected
 * @param setDisplayNodes - Updater for the controlled displayNodes state
 */
export function selectNodesDeferred(
  nodeIds: string[],
  setDisplayNodes: (updater: (nodes: Node[]) => Node[]) => void
): void {
  const selectedIds = new Set(nodeIds)
  requestAnimationFrame(() => {
    setDisplayNodes((current) =>
      current.map((node) => ({ ...node, selected: selectedIds.has(node.id) }))
    )
  })
}
/** Minimal block shape needed for position-clamping computations. */
interface BlockData {
  height?: number // measured block height in px, when known
  data?: {
    parentId?: string // id of the enclosing loop/parallel container, if nested
    width?: number // container width (containers only)
    height?: number // container height (containers only)
  }
}
/**
 * Calculates the final position for a node, clamping it to its parent
 * container bounds when the node is nested. Nodes without a parentId — or
 * whose parent node cannot be located — keep their position unchanged.
 *
 * @param nodeId - ID of the node being positioned
 * @param nodePosition - Proposed (unclamped) position
 * @param blocks - Block state map used to look up parent/height info
 * @param allNodes - All ReactFlow nodes, used to resolve the parent container
 * @returns The clamped position suitable for persistence
 */
export function getClampedPositionForNode(
  nodeId: string,
  nodePosition: { x: number; y: number },
  blocks: Record<string, BlockData>,
  allNodes: Node[]
): { x: number; y: number } {
  const block = blocks[nodeId]
  const parentId = block?.data?.parentId
  if (!parentId) return nodePosition

  const parent = allNodes.find((candidate) => candidate.id === parentId)
  if (!parent) return nodePosition

  // Fall back to default container dimensions when the parent has none stored.
  const containerDimensions = {
    width: parent.data?.width || CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
    height: parent.data?.height || CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
  }
  // Blocks have a fixed width; height is the measured height floored at the minimum.
  const measuredHeight = block?.height || BLOCK_DIMENSIONS.MIN_HEIGHT
  const blockDimensions = {
    width: BLOCK_DIMENSIONS.FIXED_WIDTH,
    height: Math.max(measuredHeight, BLOCK_DIMENSIONS.MIN_HEIGHT),
  }
  return clampPositionToContainer(nodePosition, containerDimensions, blockDimensions)
}
/**
 * Computes position updates for multiple nodes, clamping each node to its
 * parent container. Used for batch position persistence after a multi-node
 * drag or selection drag.
 *
 * @param nodes - The dragged nodes with their new (unclamped) positions
 * @param blocks - Block state map used for parent/height lookups
 * @param allNodes - All ReactFlow nodes, used to resolve parent containers
 * @returns One `{ id, position }` entry per input node
 */
export function computeClampedPositionUpdates(
  nodes: Node[],
  blocks: Record<string, BlockData>,
  allNodes: Node[]
): Array<{ id: string; position: { x: number; y: number } }> {
  const updates: Array<{ id: string; position: { x: number; y: number } }> = []
  for (const node of nodes) {
    updates.push({
      id: node.id,
      position: getClampedPositionForNode(node.id, node.position, blocks, allNodes),
    })
  }
  return updates
}
/** One block's reparenting instruction produced when moving nodes into a subflow. */
interface ParentUpdateEntry {
  blockId: string // block being reparented
  newParentId: string // target subflow container id
  affectedEdges: Edge[] // boundary edges touching this block (to be removed)
}
/**
 * Computes parent update entries for nodes being moved into a subflow.
 * Only "boundary edges" are attached to each entry — edges with exactly one
 * endpoint inside the selection. Edges fully inside the selection are left
 * intact so internal wiring survives the move.
 *
 * @param validNodes - Nodes being moved into the subflow
 * @param allEdges - Every edge currently on the canvas
 * @param targetParentId - ID of the subflow container receiving the nodes
 * @returns One entry per moved node with its boundary edges
 */
export function computeParentUpdateEntries(
  validNodes: Node[],
  allEdges: Edge[],
  targetParentId: string
): ParentUpdateEntry[] {
  const selectionIds = new Set(validNodes.map((node) => node.id))
  // An edge crosses the selection boundary when exactly one endpoint is inside.
  const crossesBoundary = (edge: Edge): boolean =>
    selectionIds.has(edge.source) !== selectionIds.has(edge.target)
  const boundaryEdges = allEdges.filter(crossesBoundary)

  return validNodes.map((node) => ({
    blockId: node.id,
    newParentId: targetParentId,
    affectedEdges: boundaryEdges.filter(
      (edge) => edge.source === node.id || edge.target === node.id
    ),
  }))
}

View File

@@ -5,12 +5,19 @@ import { Handle, type NodeProps, Position } from 'reactflow'
import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { getBlock } from '@/blocks'
/** Execution status for blocks in preview mode */
type ExecutionStatus = 'success' | 'error' | 'not-executed'
interface WorkflowPreviewBlockData {
type: string
name: string
isTrigger?: boolean
horizontalHandles?: boolean
enabled?: boolean
/** Whether this block is selected in preview mode */
isPreviewSelected?: boolean
/** Execution status for highlighting error/success states */
executionStatus?: ExecutionStatus
}
/**
@@ -21,18 +28,20 @@ interface WorkflowPreviewBlockData {
* Used in template cards and other preview contexts for performance.
*/
function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>) {
const { type, name, isTrigger = false, horizontalHandles = false, enabled = true } = data
const {
type,
name,
isTrigger = false,
horizontalHandles = false,
enabled = true,
isPreviewSelected = false,
executionStatus,
} = data
const blockConfig = getBlock(type)
if (!blockConfig) {
return null
}
const IconComponent = blockConfig.icon
const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger
const visibleSubBlocks = useMemo(() => {
if (!blockConfig.subBlocks) return []
if (!blockConfig?.subBlocks) return []
return blockConfig.subBlocks.filter((subBlock) => {
if (subBlock.hidden) return false
@@ -41,7 +50,14 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
if (subBlock.mode === 'advanced') return false
return true
})
}, [blockConfig.subBlocks])
}, [blockConfig?.subBlocks])
if (!blockConfig) {
return null
}
const IconComponent = blockConfig.icon
const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger
const hasSubBlocks = visibleSubBlocks.length > 0
const showErrorRow = !isStarterOrTrigger
@@ -49,8 +65,24 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
const horizontalHandleClass = '!border-none !bg-[var(--surface-7)] !h-5 !w-[7px] !rounded-[2px]'
const verticalHandleClass = '!border-none !bg-[var(--surface-7)] !h-[7px] !w-5 !rounded-[2px]'
const hasError = executionStatus === 'error'
const hasSuccess = executionStatus === 'success'
return (
<div className='relative w-[250px] select-none rounded-[8px] border border-[var(--border)] bg-[var(--surface-2)]'>
{/* Selection ring overlay (takes priority over execution rings) */}
{isPreviewSelected && (
<div className='pointer-events-none absolute inset-0 z-40 rounded-[8px] ring-[1.75px] ring-[var(--brand-secondary)]' />
)}
{/* Success ring overlay (only shown if not selected) */}
{!isPreviewSelected && hasSuccess && (
<div className='pointer-events-none absolute inset-0 z-40 rounded-[8px] ring-[1.75px] ring-[var(--brand-tertiary-2)]' />
)}
{/* Error ring overlay (only shown if not selected) */}
{!isPreviewSelected && hasError && (
<div className='pointer-events-none absolute inset-0 z-40 rounded-[8px] ring-[1.75px] ring-[var(--text-error)]' />
)}
{/* Target handle - not shown for triggers/starters */}
{!isStarterOrTrigger && (
<Handle
@@ -128,4 +160,20 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
)
}
export const WorkflowPreviewBlock = memo(WorkflowPreviewBlockInner)
/**
 * Custom memo comparator for the preview block. Returns true (skip re-render)
 * when the node id and every data field that affects the rendered output are
 * unchanged between renders.
 */
function shouldSkipPreviewBlockRender(
  prevProps: NodeProps<WorkflowPreviewBlockData>,
  nextProps: NodeProps<WorkflowPreviewBlockData>
): boolean {
  if (prevProps.id !== nextProps.id) return false
  const prev = prevProps.data
  const next = nextProps.data
  return (
    prev.type === next.type &&
    prev.name === next.name &&
    prev.isTrigger === next.isTrigger &&
    prev.horizontalHandles === next.horizontalHandles &&
    prev.enabled === next.enabled &&
    prev.isPreviewSelected === next.isPreviewSelected &&
    prev.executionStatus === next.executionStatus
  )
}
export const WorkflowPreviewBlock = memo(WorkflowPreviewBlockInner, shouldSkipPreviewBlockRender)

View File

@@ -10,6 +10,8 @@ interface WorkflowPreviewSubflowData {
width?: number
height?: number
kind: 'loop' | 'parallel'
/** Whether this subflow is selected in preview mode */
isPreviewSelected?: boolean
}
/**
@@ -19,7 +21,7 @@ interface WorkflowPreviewSubflowData {
* Used in template cards and other preview contexts for performance.
*/
function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowData>) {
const { name, width = 500, height = 300, kind } = data
const { name, width = 500, height = 300, kind, isPreviewSelected = false } = data
const isLoop = kind === 'loop'
const BlockIcon = isLoop ? RepeatIcon : SplitIcon
@@ -42,6 +44,11 @@ function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowD
height,
}}
>
{/* Selection ring overlay */}
{isPreviewSelected && (
<div className='pointer-events-none absolute inset-0 z-40 rounded-[8px] ring-[1.75px] ring-[var(--brand-secondary)]' />
)}
{/* Target handle on left (input to the subflow) */}
<Handle
type='target'
@@ -55,29 +62,37 @@ function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowD
}}
/>
{/* Header - matches actual subflow header */}
<div className='flex items-center gap-[10px] rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'>
<div
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
style={{ backgroundColor: blockIconBg }}
>
<BlockIcon className='h-[16px] w-[16px] text-white' />
{/* Header - matches actual subflow header structure */}
<div className='flex items-center justify-between rounded-t-[8px] border-[var(--border)] border-b bg-[var(--surface-2)] py-[8px] pr-[12px] pl-[8px]'>
<div className='flex min-w-0 flex-1 items-center gap-[10px]'>
<div
className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
style={{ backgroundColor: blockIconBg }}
>
<BlockIcon className='h-[16px] w-[16px] text-white' />
</div>
<span className='font-medium text-[16px]' title={blockName}>
{blockName}
</span>
</div>
<span className='font-medium text-[16px]' title={blockName}>
{blockName}
</span>
</div>
{/* Start handle inside - connects to first block in subflow */}
<div className='absolute top-[56px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'>
<span className='font-medium text-[14px] text-white'>Start</span>
<Handle
type='source'
position={Position.Right}
id={startHandleId}
className={rightHandleClass}
style={{ right: '-8px', top: '50%', transform: 'translateY(-50%)' }}
/>
{/* Content area - matches workflow structure */}
<div
className='h-[calc(100%-50px)] pt-[16px] pr-[80px] pb-[16px] pl-[16px]'
style={{ position: 'relative' }}
>
{/* Subflow Start - connects to first block in subflow */}
<div className='absolute top-[16px] left-[16px] flex items-center justify-center rounded-[8px] border border-[var(--border-1)] bg-[var(--surface-2)] px-[12px] py-[6px]'>
<span className='font-medium text-[14px] text-[var(--text-primary)]'>Start</span>
<Handle
type='source'
position={Position.Right}
id={startHandleId}
className={rightHandleClass}
style={{ right: '-8px', top: '50%', transform: 'translateY(-50%)' }}
/>
</div>
</div>
{/* End source handle on right (output from the subflow) */}

View File

@@ -1,2 +1,2 @@
export { BlockDetailsSidebar } from './components/block-details-sidebar'
export { WorkflowPreview } from './preview'
export { getLeftmostBlockId, WorkflowPreview } from './preview'

View File

@@ -1,6 +1,6 @@
'use client'
import { useEffect, useMemo } from 'react'
import { useEffect, useMemo, useRef } from 'react'
import ReactFlow, {
ConnectionLineType,
type Edge,
@@ -14,23 +14,114 @@ import 'reactflow/dist/style.css'
import { createLogger } from '@sim/logger'
import { cn } from '@/lib/core/utils/cn'
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
import { NoteBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/note-block/note-block'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { estimateBlockDimensions } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-node-utilities'
import { WorkflowPreviewBlock } from '@/app/workspace/[workspaceId]/w/components/preview/components/block'
import { WorkflowPreviewSubflow } from '@/app/workspace/[workspaceId]/w/components/preview/components/subflow'
import { getBlock } from '@/blocks'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowPreview')
/**
 * Gets block dimensions for preview purposes.
 * Containers (loop/parallel) use stored dimensions floored at their minimums,
 * or defaults when unset. Regular blocks use their stored height when
 * available, otherwise an estimate based on block type.
 */
function getPreviewBlockDimensions(block: BlockState): { width: number; height: number } {
  const isContainer = block.type === 'loop' || block.type === 'parallel'
  if (isContainer) {
    const storedWidth = block.data?.width
    const storedHeight = block.data?.height
    return {
      width: storedWidth
        ? Math.max(storedWidth, CONTAINER_DIMENSIONS.MIN_WIDTH)
        : CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
      height: storedHeight
        ? Math.max(storedHeight, CONTAINER_DIMENSIONS.MIN_HEIGHT)
        : CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
    }
  }
  if (block.height) {
    // Blocks render at a fixed width; floor the stored height at the minimum.
    return {
      width: BLOCK_DIMENSIONS.FIXED_WIDTH,
      height: Math.max(block.height, BLOCK_DIMENSIONS.MIN_HEIGHT),
    }
  }
  return estimateBlockDimensions(block.type)
}
/**
 * Calculates container dimensions from child block positions and sizes.
 * Mirrors useNodeUtilities.calculateLoopDimensions: the container grows to
 * fit the furthest child plus padding and never shrinks below the defaults.
 *
 * @param containerId - ID of the loop/parallel container
 * @param blocks - All workflow blocks (children found via data.parentId)
 */
function calculateContainerDimensions(
  containerId: string,
  blocks: Record<string, BlockState>
): { width: number; height: number } {
  const children = Object.values(blocks).filter((block) => block?.data?.parentId === containerId)
  if (children.length === 0) {
    return {
      width: CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
      height: CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
    }
  }
  // Track the furthest right/bottom extent across all positioned children.
  let maxRight = 0
  let maxBottom = 0
  children.forEach((child) => {
    if (!child?.position) return
    const size = getPreviewBlockDimensions(child)
    maxRight = Math.max(maxRight, child.position.x + size.width)
    maxBottom = Math.max(maxBottom, child.position.y + size.height)
  })
  return {
    width: Math.max(
      CONTAINER_DIMENSIONS.DEFAULT_WIDTH,
      maxRight + CONTAINER_DIMENSIONS.RIGHT_PADDING
    ),
    height: Math.max(
      CONTAINER_DIMENSIONS.DEFAULT_HEIGHT,
      maxBottom + CONTAINER_DIMENSIONS.BOTTOM_PADDING
    ),
  }
}
/**
 * Finds the leftmost block ID from a workflow state.
 * Returns the id of the block with the smallest x position; loop/parallel
 * containers are excluded. Blocks without a position never win the
 * comparison, so null is returned when no positioned, non-container block
 * exists.
 */
export function getLeftmostBlockId(workflowState: WorkflowState | null | undefined): string | null {
  if (!workflowState?.blocks) return null
  let leftmost: string | null = null
  let smallestX = Number.POSITIVE_INFINITY
  for (const [blockId, block] of Object.entries(workflowState.blocks)) {
    if (!block) continue
    // Subflow containers never count as the "leftmost" block.
    if (block.type === 'loop' || block.type === 'parallel') continue
    const x = block.position?.x ?? Number.POSITIVE_INFINITY
    if (x < smallestX) {
      smallestX = x
      leftmost = blockId
    }
  }
  return leftmost
}
/** Execution status for edges/nodes in the preview */
type ExecutionStatus = 'success' | 'error' | 'not-executed'
interface WorkflowPreviewProps {
workflowState: WorkflowState
showSubBlocks?: boolean
className?: string
height?: string | number
width?: string | number
@@ -39,12 +130,18 @@ interface WorkflowPreviewProps {
defaultZoom?: number
fitPadding?: number
onNodeClick?: (blockId: string, mousePosition: { x: number; y: number }) => void
/** Callback when a node is right-clicked */
onNodeContextMenu?: (blockId: string, mousePosition: { x: number; y: number }) => void
/** Callback when the canvas (empty area) is clicked */
onPaneClick?: () => void
/** Use lightweight blocks for better performance in template cards */
lightweight?: boolean
/** Cursor style to show when hovering the canvas */
cursorStyle?: 'default' | 'pointer' | 'grab'
/** Map of executed block IDs to their status for highlighting the execution path */
executedBlocks?: Record<string, { status: string }>
/** Currently selected block ID for highlighting */
selectedBlockId?: string | null
}
/**
@@ -73,44 +170,49 @@ const edgeTypes: EdgeTypes = {
}
interface FitViewOnChangeProps {
nodes: Node[]
nodeIds: string
fitPadding: number
}
/**
* Helper component that calls fitView when nodes change.
* Helper component that calls fitView when the set of nodes changes.
* Only triggers on actual node additions/removals, not on selection changes.
* Must be rendered inside ReactFlowProvider.
*/
function FitViewOnChange({ nodes, fitPadding }: FitViewOnChangeProps) {
function FitViewOnChange({ nodeIds, fitPadding }: FitViewOnChangeProps) {
const { fitView } = useReactFlow()
const hasFittedRef = useRef(false)
useEffect(() => {
if (nodes.length > 0) {
if (nodeIds.length > 0 && !hasFittedRef.current) {
hasFittedRef.current = true
// Small delay to ensure nodes are rendered before fitting
const timeoutId = setTimeout(() => {
fitView({ padding: fitPadding, duration: 200 })
}, 50)
return () => clearTimeout(timeoutId)
}
}, [nodes, fitPadding, fitView])
}, [nodeIds, fitPadding, fitView])
return null
}
export function WorkflowPreview({
workflowState,
showSubBlocks = true,
className,
height = '100%',
width = '100%',
isPannable = false,
isPannable = true,
defaultPosition,
defaultZoom = 0.8,
fitPadding = 0.25,
onNodeClick,
onNodeContextMenu,
onPaneClick,
lightweight = false,
cursorStyle = 'grab',
executedBlocks,
selectedBlockId,
}: WorkflowPreviewProps) {
const nodeTypes = lightweight ? lightweightNodeTypes : fullNodeTypes
const isValidWorkflowState = workflowState?.blocks && workflowState.edges
@@ -184,6 +286,8 @@ export function WorkflowPreview({
if (lightweight) {
if (block.type === 'loop' || block.type === 'parallel') {
const isSelected = selectedBlockId === blockId
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
nodeArray.push({
id: blockId,
type: 'subflowNode',
@@ -191,31 +295,56 @@ export function WorkflowPreview({
draggable: false,
data: {
name: block.name,
width: block.data?.width || 500,
height: block.data?.height || 300,
width: dimensions.width,
height: dimensions.height,
kind: block.type as 'loop' | 'parallel',
isPreviewSelected: isSelected,
},
})
return
}
const isSelected = selectedBlockId === blockId
let lightweightExecutionStatus: ExecutionStatus | undefined
if (executedBlocks) {
const blockExecution = executedBlocks[blockId]
if (blockExecution) {
if (blockExecution.status === 'error') {
lightweightExecutionStatus = 'error'
} else if (blockExecution.status === 'success') {
lightweightExecutionStatus = 'success'
} else {
lightweightExecutionStatus = 'not-executed'
}
} else {
lightweightExecutionStatus = 'not-executed'
}
}
nodeArray.push({
id: blockId,
type: 'workflowBlock',
position: absolutePosition,
draggable: false,
// Blocks inside subflows need higher z-index to appear above the container
zIndex: block.data?.parentId ? 10 : undefined,
data: {
type: block.type,
name: block.name,
isTrigger: block.triggerMode === true,
horizontalHandles: block.horizontalHandles ?? false,
enabled: block.enabled ?? true,
isPreviewSelected: isSelected,
executionStatus: lightweightExecutionStatus,
},
})
return
}
if (block.type === 'loop') {
const isSelected = selectedBlockId === blockId
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
nodeArray.push({
id: blockId,
type: 'subflowNode',
@@ -226,10 +355,11 @@ export function WorkflowPreview({
data: {
...block.data,
name: block.name,
width: block.data?.width || 500,
height: block.data?.height || 300,
width: dimensions.width,
height: dimensions.height,
state: 'valid',
isPreview: true,
isPreviewSelected: isSelected,
kind: 'loop',
},
})
@@ -237,6 +367,8 @@ export function WorkflowPreview({
}
if (block.type === 'parallel') {
const isSelected = selectedBlockId === blockId
const dimensions = calculateContainerDimensions(blockId, workflowState.blocks)
nodeArray.push({
id: blockId,
type: 'subflowNode',
@@ -247,10 +379,11 @@ export function WorkflowPreview({
data: {
...block.data,
name: block.name,
width: block.data?.width || 500,
height: block.data?.height || 300,
width: dimensions.width,
height: dimensions.height,
state: 'valid',
isPreview: true,
isPreviewSelected: isSelected,
kind: 'parallel',
},
})
@@ -281,15 +414,15 @@ export function WorkflowPreview({
}
}
const isSelected = selectedBlockId === blockId
nodeArray.push({
id: blockId,
type: nodeType,
position: absolutePosition,
draggable: false,
className:
executionStatus && executionStatus !== 'not-executed'
? `execution-${executionStatus}`
: undefined,
// Blocks inside subflows need higher z-index to appear above the container
zIndex: block.data?.parentId ? 10 : undefined,
data: {
type: block.type,
config: blockConfig,
@@ -297,6 +430,7 @@ export function WorkflowPreview({
blockState: block,
canEdit: false,
isPreview: true,
isPreviewSelected: isSelected,
subBlockValues: block.subBlocks ?? {},
executionStatus,
},
@@ -308,11 +442,11 @@ export function WorkflowPreview({
blocksStructure,
loopsStructure,
parallelsStructure,
showSubBlocks,
workflowState.blocks,
isValidWorkflowState,
lightweight,
executedBlocks,
selectedBlockId,
])
const edges: Edge[] = useMemo(() => {
@@ -325,9 +459,8 @@ export function WorkflowPreview({
const targetExecuted = executedBlocks[edge.target]
if (sourceExecuted && targetExecuted) {
if (targetExecuted.status === 'error') {
executionStatus = 'error'
} else if (sourceExecuted.status === 'success' && targetExecuted.status === 'success') {
// Edge is success if source succeeded and target was executed (even if target errored)
if (sourceExecuted.status === 'success') {
executionStatus = 'success'
} else {
executionStatus = 'not-executed'
@@ -344,6 +477,8 @@ export function WorkflowPreview({
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
data: executionStatus ? { executionStatus } : undefined,
// Raise executed edges above default edges
zIndex: executionStatus === 'success' ? 10 : 0,
}
})
}, [edgesStructure, workflowState.edges, isValidWorkflowState, executedBlocks])
@@ -368,20 +503,19 @@ export function WorkflowPreview({
<ReactFlowProvider>
<div
style={{ height, width, backgroundColor: 'var(--bg)' }}
className={cn('preview-mode', className)}
className={cn('preview-mode', onNodeClick && 'interactive-nodes', className)}
>
<style>{`
${cursorStyle ? `.preview-mode .react-flow__pane { cursor: ${cursorStyle} !important; }` : ''}
/* Canvas cursor - grab on the flow container and pane */
.preview-mode .react-flow { cursor: ${cursorStyle}; }
.preview-mode .react-flow__pane { cursor: ${cursorStyle} !important; }
.preview-mode .react-flow__selectionpane { cursor: ${cursorStyle} !important; }
.preview-mode .react-flow__renderer { cursor: ${cursorStyle}; }
/* Execution status styling for nodes */
.preview-mode .react-flow__node.execution-success {
border-radius: 8px;
box-shadow: 0 0 0 4px var(--border-success);
}
.preview-mode .react-flow__node.execution-error {
border-radius: 8px;
box-shadow: 0 0 0 4px var(--text-error);
}
/* Node cursor - pointer on nodes when onNodeClick is provided */
.preview-mode.interactive-nodes .react-flow__node { cursor: pointer !important; }
.preview-mode.interactive-nodes .react-flow__node > div { cursor: pointer !important; }
.preview-mode.interactive-nodes .react-flow__node * { cursor: pointer !important; }
`}</style>
<ReactFlow
nodes={nodes}
@@ -391,7 +525,7 @@ export function WorkflowPreview({
connectionLineType={ConnectionLineType.SmoothStep}
fitView
fitViewOptions={{ padding: fitPadding }}
panOnScroll={false}
panOnScroll={isPannable}
panOnDrag={isPannable}
zoomOnScroll={false}
draggable={false}
@@ -414,8 +548,18 @@ export function WorkflowPreview({
}
: undefined
}
onNodeContextMenu={
onNodeContextMenu
? (event, node) => {
event.preventDefault()
event.stopPropagation()
onNodeContextMenu(node.id, { x: event.clientX, y: event.clientY })
}
: undefined
}
onPaneClick={onPaneClick}
/>
<FitViewOnChange nodes={nodes} fitPadding={fitPadding} />
<FitViewOnChange nodeIds={blocksStructure.ids} fitPadding={fitPadding} />
</div>
</ReactFlowProvider>
)

View File

@@ -165,7 +165,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const { data: session } = useSession()
const queryClient = useQueryClient()
const { data: organizationsData } = useOrganizations()
const { data: subscriptionData } = useSubscriptionData()
const { data: subscriptionData } = useSubscriptionData({ enabled: isBillingEnabled })
const { data: ssoProvidersData, isLoading: isLoadingSSO } = useSSOProviders()
const activeOrganization = organizationsData?.activeOrganization

View File

@@ -4,6 +4,7 @@ import JSZip from 'jszip'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import type { Variable } from '@/stores/workflows/workflow/types'
const logger = createLogger('useExportWorkflow')
@@ -122,17 +123,12 @@ export function useExportWorkflow({
continue
}
// Fetch workflow variables
// Fetch workflow variables (API returns Record format directly)
const variablesResponse = await fetch(`/api/workflows/${workflowId}/variables`)
let workflowVariables: any[] = []
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
workflowVariables = variablesData?.data
}
// Prepare export state

View File

@@ -2,8 +2,10 @@ import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger'
import {
exportWorkspaceToZip,
type FolderExportData,
type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
import type { Variable } from '@/stores/workflows/workflow/types'
const logger = createLogger('useExportWorkspace')
@@ -74,15 +76,10 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
}
const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
let workflowVariables: any[] = []
let workflowVariables: Record<string, Variable> | undefined
if (variablesResponse.ok) {
const variablesData = await variablesResponse.json()
workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
workflowVariables = variablesData?.data
}
workflowsToExport.push({
@@ -101,15 +98,13 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
}
}
const foldersToExport: Array<{
id: string
name: string
parentId: string | null
}> = (foldersData.folders || []).map((folder: any) => ({
id: folder.id,
name: folder.name,
parentId: folder.parentId,
}))
const foldersToExport: FolderExportData[] = (foldersData.folders || []).map(
(folder: FolderExportData) => ({
id: folder.id,
name: folder.name,
parentId: folder.parentId,
})
)
const zipBlob = await exportWorkspaceToZip(
workspaceName,

View File

@@ -79,21 +79,36 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
body: JSON.stringify(workflowData),
})
// Save variables if any
if (workflowData.variables && workflowData.variables.length > 0) {
const variablesPayload = workflowData.variables.map((v: any) => ({
id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
workflowId: newWorkflowId,
name: v.name,
type: v.type,
value: v.value,
}))
// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
await fetch(`/api/workflows/${newWorkflowId}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesPayload }),
})
if (variablesArray.length > 0) {
const variablesRecord: Record<
string,
{ id: string; workflowId: string; name: string; type: string; value: unknown }
> = {}
for (const v of variablesArray) {
const id = typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID()
variablesRecord[id] = {
id,
workflowId: newWorkflowId,
name: v.name,
type: v.type,
value: v.value,
}
}
await fetch(`/api/workflows/${newWorkflowId}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesRecord }),
})
}
}
logger.info(`Imported workflow: ${workflowName}`)

View File

@@ -159,21 +159,36 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
continue
}
// Save variables if any
if (workflowData.variables && workflowData.variables.length > 0) {
const variablesPayload = workflowData.variables.map((v: any) => ({
id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
workflowId: newWorkflow.id,
name: v.name,
type: v.type,
value: v.value,
}))
// Save variables if any (handle both legacy Array and current Record formats)
if (workflowData.variables) {
// Convert to Record format for API (handles backwards compatibility with old Array exports)
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
: Object.values(workflowData.variables)
await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesPayload }),
})
if (variablesArray.length > 0) {
const variablesRecord: Record<
string,
{ id: string; workflowId: string; name: string; type: string; value: unknown }
> = {}
for (const v of variablesArray) {
const id = typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID()
variablesRecord[id] = {
id,
workflowId: newWorkflow.id,
name: v.name,
type: v.type,
value: v.value,
}
}
await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesRecord }),
})
}
}
logger.info(`Imported workflow: ${workflowName}`)

View File

@@ -112,7 +112,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</td>
<td style={baseStyles.footerText}>
{brand.name}
{isHosted && <>, 80 Langton St, San Francisco, CA 94133, USA</>}
{isHosted && <>, 80 Langton St, San Francisco, CA 94103, USA</>}
</td>
<td style={baseStyles.gutter} width={spacing.gutter}>
&nbsp;

View File

@@ -427,9 +427,7 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
input: 'json',
},
outputs: {
response: {
input: 'json',
},
response: { type: 'json', description: 'Input response' },
},
enabled: true,
metadata: { id: 'starter', name: 'Starter Block' },
@@ -444,11 +442,9 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
headers: 'json',
},
outputs: {
response: {
data: 'json',
status: 'number',
headers: 'json',
},
data: { type: 'json', description: 'Response data' },
status: { type: 'number', description: 'Response status' },
headers: { type: 'json', description: 'Response headers' },
},
enabled: true,
metadata: { id: 'response', name: 'Response Block' },

View File

@@ -1,3 +1,5 @@
import type { LoopType, ParallelType } from '@/lib/workflows/types'
export enum BlockType {
PARALLEL = 'parallel',
LOOP = 'loop',
@@ -40,12 +42,8 @@ export const METADATA_ONLY_BLOCK_TYPES = [
BlockType.NOTE,
] as const
export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile'
export type SentinelType = 'start' | 'end'
export type ParallelType = 'collection' | 'count'
export const EDGE = {
CONDITION_PREFIX: 'condition-',
CONDITION_TRUE: 'condition-true',

View File

@@ -366,12 +366,12 @@ export class RouterBlockHandler implements BlockHandler {
let systemPrompt = ''
if (isAgentBlockType(targetBlock.metadata?.id)) {
const paramsPrompt = targetBlock.config?.params?.systemPrompt
const inputsPrompt = targetBlock.inputs?.systemPrompt
systemPrompt =
targetBlock.config?.params?.systemPrompt || targetBlock.inputs?.systemPrompt || ''
if (!systemPrompt && targetBlock.inputs) {
systemPrompt = targetBlock.inputs.systemPrompt || ''
}
(typeof paramsPrompt === 'string' ? paramsPrompt : '') ||
(typeof inputsPrompt === 'string' ? inputsPrompt : '') ||
''
}
return {

View File

@@ -28,6 +28,8 @@ async function fetchSubscriptionData(includeOrg = false) {
interface UseSubscriptionDataOptions {
/** Include organization membership and role data */
includeOrg?: boolean
/** Whether to enable the query (defaults to true) */
enabled?: boolean
}
/**
@@ -35,13 +37,14 @@ interface UseSubscriptionDataOptions {
* @param options - Optional configuration
*/
export function useSubscriptionData(options: UseSubscriptionDataOptions = {}) {
const { includeOrg = false } = options
const { includeOrg = false, enabled = true } = options
return useQuery({
queryKey: subscriptionKeys.user(includeOrg),
queryFn: () => fetchSubscriptionData(includeOrg),
staleTime: 30 * 1000,
placeholderData: keepPreviousData,
enabled,
})
}
@@ -58,17 +61,25 @@ async function fetchUsageLimitData() {
return response.json()
}
interface UseUsageLimitDataOptions {
/** Whether to enable the query (defaults to true) */
enabled?: boolean
}
/**
* Hook to fetch usage limit metadata
* Returns: currentLimit, minimumLimit, canEdit, plan, updatedAt
* Use this for editing usage limits, not for displaying current usage
*/
export function useUsageLimitData() {
export function useUsageLimitData(options: UseUsageLimitDataOptions = {}) {
const { enabled = true } = options
return useQuery({
queryKey: subscriptionKeys.usage(),
queryFn: fetchUsageLimitData,
staleTime: 30 * 1000,
placeholderData: keepPreviousData,
enabled,
})
}

View File

@@ -0,0 +1,155 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
interface UseCodeViewerFeaturesOptions {
/** Reference to the content container for scroll-to-match functionality */
contentRef?: React.RefObject<HTMLDivElement | null>
/** Initial wrap text state (ignored if externalWrapText is provided) */
initialWrapText?: boolean
/** External wrap text state (e.g., from Zustand store) */
externalWrapText?: boolean
/** External setter for wrap text (required if externalWrapText is provided) */
onWrapTextChange?: (wrap: boolean) => void
/** Callback when escape is pressed (optional, for custom handling) */
onEscape?: () => void
}
interface UseCodeViewerFeaturesReturn {
wrapText: boolean
setWrapText: (wrap: boolean) => void
toggleWrapText: () => void
isSearchActive: boolean
searchQuery: string
setSearchQuery: (query: string) => void
matchCount: number
currentMatchIndex: number
activateSearch: () => void
closeSearch: () => void
goToNextMatch: () => void
goToPreviousMatch: () => void
handleMatchCountChange: (count: number) => void
searchInputRef: React.RefObject<HTMLInputElement | null>
}
/**
* Reusable hook for Code.Viewer features: search and wrap text functionality.
* Supports both internal state and external state (e.g., from Zustand) for wrapText.
*/
export function useCodeViewerFeatures(
options: UseCodeViewerFeaturesOptions = {}
): UseCodeViewerFeaturesReturn {
const {
contentRef,
initialWrapText = true,
externalWrapText,
onWrapTextChange,
onEscape,
} = options
// Use external state if provided, otherwise use internal state
const [internalWrapText, setInternalWrapText] = useState(initialWrapText)
const wrapText = externalWrapText !== undefined ? externalWrapText : internalWrapText
const setWrapText = onWrapTextChange ?? setInternalWrapText
const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('')
const [matchCount, setMatchCount] = useState(0)
const [currentMatchIndex, setCurrentMatchIndex] = useState(0)
const searchInputRef = useRef<HTMLInputElement>(null)
const toggleWrapText = useCallback(() => {
setWrapText(!wrapText)
}, [wrapText, setWrapText])
const activateSearch = useCallback(() => {
setIsSearchActive(true)
setTimeout(() => {
searchInputRef.current?.focus()
}, 0)
}, [])
const closeSearch = useCallback(() => {
setIsSearchActive(false)
setSearchQuery('')
setMatchCount(0)
setCurrentMatchIndex(0)
}, [])
const goToNextMatch = useCallback(() => {
if (matchCount === 0) return
setCurrentMatchIndex((prev) => (prev + 1) % matchCount)
}, [matchCount])
const goToPreviousMatch = useCallback(() => {
if (matchCount === 0) return
setCurrentMatchIndex((prev) => (prev - 1 + matchCount) % matchCount)
}, [matchCount])
const handleMatchCountChange = useCallback((count: number) => {
setMatchCount(count)
setCurrentMatchIndex(0)
}, [])
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Escape' && isSearchActive) {
e.preventDefault()
closeSearch()
onEscape?.()
}
}
window.addEventListener('keydown', handleKeyDown)
return () => window.removeEventListener('keydown', handleKeyDown)
}, [isSearchActive, closeSearch, onEscape])
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (!isSearchActive) return
const isSearchInputFocused = document.activeElement === searchInputRef.current
if (e.key === 'Enter' && isSearchInputFocused && matchCount > 0) {
e.preventDefault()
if (e.shiftKey) {
goToPreviousMatch()
} else {
goToNextMatch()
}
}
}
window.addEventListener('keydown', handleKeyDown)
return () => window.removeEventListener('keydown', handleKeyDown)
}, [isSearchActive, matchCount, goToNextMatch, goToPreviousMatch])
useEffect(() => {
if (!isSearchActive || matchCount === 0 || !contentRef?.current) return
const matchElements = contentRef.current.querySelectorAll('[data-search-match]')
const currentElement = matchElements[currentMatchIndex]
if (currentElement) {
currentElement.scrollIntoView({ block: 'center' })
}
}, [currentMatchIndex, isSearchActive, matchCount, contentRef])
return {
wrapText,
setWrapText,
toggleWrapText,
isSearchActive,
searchQuery,
setSearchQuery,
matchCount,
currentMatchIndex,
activateSearch,
closeSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef,
}
}

View File

@@ -6,6 +6,17 @@ import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import { getBlock } from '@/blocks'
import { useUndoRedo } from '@/hooks/use-undo-redo'
import {
BLOCK_OPERATIONS,
BLOCKS_OPERATIONS,
EDGE_OPERATIONS,
EDGES_OPERATIONS,
OPERATION_TARGETS,
SUBBLOCK_OPERATIONS,
SUBFLOW_OPERATIONS,
VARIABLE_OPERATIONS,
WORKFLOW_OPERATIONS,
} from '@/socket/constants'
import { useNotificationStore } from '@/stores/notifications'
import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store'
import { usePanelEditorStore } from '@/stores/panel/editor/store'
@@ -20,8 +31,6 @@ import type { BlockState, Loop, Parallel, Position } from '@/stores/workflows/wo
const logger = createLogger('CollaborativeWorkflow')
const WEBHOOK_SUBBLOCK_FIELDS = ['webhookId', 'triggerPath']
export function useCollaborativeWorkflow() {
const undoRedo = useUndoRedo()
const isUndoRedoInProgress = useRef(false)
@@ -33,7 +42,7 @@ export function useCollaborativeWorkflow() {
const { blockId, before, after } = e.detail || {}
if (!blockId || !before || !after) return
if (isUndoRedoInProgress.current) return
undoRedo.recordMove(blockId, before, after)
undoRedo.recordBatchMoveBlocks([{ blockId, before, after }])
}
const parentUpdateHandler = (e: any) => {
@@ -197,9 +206,9 @@ export function useCollaborativeWorkflow() {
isApplyingRemoteChange.current = true
try {
if (target === 'block') {
if (target === OPERATION_TARGETS.BLOCK) {
switch (operation) {
case 'update-position': {
case BLOCK_OPERATIONS.UPDATE_POSITION: {
const blockId = payload.id
if (!data.timestamp) {
@@ -227,22 +236,22 @@ export function useCollaborativeWorkflow() {
}
break
}
case 'update-name':
case BLOCK_OPERATIONS.UPDATE_NAME:
workflowStore.updateBlockName(payload.id, payload.name)
break
case 'toggle-enabled':
case BLOCK_OPERATIONS.TOGGLE_ENABLED:
workflowStore.toggleBlockEnabled(payload.id)
break
case 'update-parent':
case BLOCK_OPERATIONS.UPDATE_PARENT:
workflowStore.updateParentId(payload.id, payload.parentId, payload.extent)
break
case 'update-advanced-mode':
case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE:
workflowStore.setBlockAdvancedMode(payload.id, payload.advancedMode)
break
case 'update-trigger-mode':
case BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE:
workflowStore.setBlockTriggerMode(payload.id, payload.triggerMode)
break
case 'toggle-handles': {
case BLOCK_OPERATIONS.TOGGLE_HANDLES: {
const currentBlock = workflowStore.blocks[payload.id]
if (currentBlock && currentBlock.horizontalHandles !== payload.horizontalHandles) {
workflowStore.toggleBlockHandles(payload.id)
@@ -250,9 +259,9 @@ export function useCollaborativeWorkflow() {
break
}
}
} else if (target === 'blocks') {
} else if (target === OPERATION_TARGETS.BLOCKS) {
switch (operation) {
case 'batch-update-positions': {
case BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS: {
const { updates } = payload
if (Array.isArray(updates)) {
updates.forEach(({ id, position }: { id: string; position: Position }) => {
@@ -264,12 +273,12 @@ export function useCollaborativeWorkflow() {
break
}
}
} else if (target === 'edge') {
} else if (target === OPERATION_TARGETS.EDGE) {
switch (operation) {
case 'add':
case EDGE_OPERATIONS.ADD:
workflowStore.addEdge(payload as Edge)
break
case 'remove': {
case EDGE_OPERATIONS.REMOVE: {
workflowStore.removeEdge(payload.id)
const updatedBlocks = useWorkflowStore.getState().blocks
@@ -290,9 +299,44 @@ export function useCollaborativeWorkflow() {
break
}
}
} else if (target === 'subflow') {
} else if (target === OPERATION_TARGETS.EDGES) {
switch (operation) {
case 'update':
case EDGES_OPERATIONS.BATCH_REMOVE_EDGES: {
const { ids } = payload
if (Array.isArray(ids)) {
ids.forEach((id: string) => {
workflowStore.removeEdge(id)
})
const updatedBlocks = useWorkflowStore.getState().blocks
const updatedEdges = useWorkflowStore.getState().edges
const graph = {
blocksById: updatedBlocks,
edgesById: Object.fromEntries(updatedEdges.map((e) => [e.id, e])),
}
const undoRedoStore = useUndoRedoStore.getState()
const stackKeys = Object.keys(undoRedoStore.stacks)
stackKeys.forEach((key) => {
const [wfId, uId] = key.split(':')
if (wfId === activeWorkflowId) {
undoRedoStore.pruneInvalidEntries(wfId, uId, graph)
}
})
}
break
}
case EDGES_OPERATIONS.BATCH_ADD_EDGES: {
const { edges } = payload
if (Array.isArray(edges)) {
edges.forEach((edge: Edge) => workflowStore.addEdge(edge))
}
break
}
}
} else if (target === OPERATION_TARGETS.SUBFLOW) {
switch (operation) {
case SUBFLOW_OPERATIONS.UPDATE:
// Handle subflow configuration updates (loop/parallel type changes, etc.)
if (payload.type === 'loop') {
const { config } = payload
@@ -325,9 +369,9 @@ export function useCollaborativeWorkflow() {
}
break
}
} else if (target === 'variable') {
} else if (target === OPERATION_TARGETS.VARIABLE) {
switch (operation) {
case 'add':
case VARIABLE_OPERATIONS.ADD:
variablesStore.addVariable(
{
workflowId: payload.workflowId,
@@ -338,7 +382,7 @@ export function useCollaborativeWorkflow() {
payload.id
)
break
case 'variable-update':
case VARIABLE_OPERATIONS.UPDATE:
if (payload.field === 'name') {
variablesStore.updateVariable(payload.variableId, { name: payload.value })
} else if (payload.field === 'value') {
@@ -347,13 +391,13 @@ export function useCollaborativeWorkflow() {
variablesStore.updateVariable(payload.variableId, { type: payload.value })
}
break
case 'remove':
case VARIABLE_OPERATIONS.REMOVE:
variablesStore.deleteVariable(payload.variableId)
break
}
} else if (target === 'workflow') {
} else if (target === OPERATION_TARGETS.WORKFLOW) {
switch (operation) {
case 'replace-state':
case WORKFLOW_OPERATIONS.REPLACE_STATE:
if (payload.state) {
logger.info('Received workflow state replacement from remote user', {
userId,
@@ -386,9 +430,9 @@ export function useCollaborativeWorkflow() {
}
}
if (target === 'blocks') {
if (target === OPERATION_TARGETS.BLOCKS) {
switch (operation) {
case 'batch-add-blocks': {
case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: {
const {
blocks,
edges,
@@ -456,7 +500,7 @@ export function useCollaborativeWorkflow() {
logger.info('Successfully applied batch-add-blocks from remote user')
break
}
case 'batch-remove-blocks': {
case BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS: {
const { ids } = payload
logger.info('Received batch-remove-blocks from remote user', {
userId,
@@ -722,7 +766,12 @@ export function useCollaborativeWorkflow() {
)
const collaborativeBatchUpdatePositions = useCallback(
(updates: Array<{ id: string; position: Position }>) => {
(
updates: Array<{ id: string; position: Position }>,
options?: {
previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
}
) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
return
@@ -735,8 +784,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
operation: 'batch-update-positions',
target: 'blocks',
operation: BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS,
target: OPERATION_TARGETS.BLOCKS,
payload: { updates },
},
workflowId: activeWorkflowId || '',
@@ -746,8 +795,31 @@ export function useCollaborativeWorkflow() {
updates.forEach(({ id, position }) => {
workflowStore.updateBlockPosition(id, position)
})
if (options?.previousPositions && options.previousPositions.size > 0) {
const moves = updates
.filter((u) => options.previousPositions!.has(u.id))
.map((u) => {
const prev = options.previousPositions!.get(u.id)!
const block = workflowStore.blocks[u.id]
return {
blockId: u.id,
before: prev,
after: {
x: u.position.x,
y: u.position.y,
parentId: block?.data?.parentId,
},
}
})
.filter((m) => m.before.x !== m.after.x || m.before.y !== m.after.y)
if (moves.length > 0) {
undoRedo.recordBatchMoveBlocks(moves)
}
}
},
[addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore]
[addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, workflowStore, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -781,63 +853,167 @@ export function useCollaborativeWorkflow() {
return { success: false, error: `Block name "${trimmedName}" already exists` }
}
executeQueuedOperation('update-name', 'block', { id, name: trimmedName }, () => {
const result = workflowStore.updateBlockName(id, trimmedName)
executeQueuedOperation(
BLOCK_OPERATIONS.UPDATE_NAME,
OPERATION_TARGETS.BLOCK,
{ id, name: trimmedName },
() => {
const result = workflowStore.updateBlockName(id, trimmedName)
if (result.success && result.changedSubblocks.length > 0) {
logger.info('Emitting cascaded subblock updates from block rename', {
blockId: id,
newName: trimmedName,
updateCount: result.changedSubblocks.length,
})
if (result.success && result.changedSubblocks.length > 0) {
logger.info('Emitting cascaded subblock updates from block rename', {
blockId: id,
newName: trimmedName,
updateCount: result.changedSubblocks.length,
})
result.changedSubblocks.forEach(
({
blockId,
subBlockId,
newValue,
}: {
blockId: string
subBlockId: string
newValue: any
}) => {
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: 'subblock-update',
target: 'subblock',
payload: { blockId, subblockId: subBlockId, value: newValue },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
}
)
result.changedSubblocks.forEach(
({
blockId,
subBlockId,
newValue,
}: {
blockId: string
subBlockId: string
newValue: any
}) => {
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: SUBBLOCK_OPERATIONS.UPDATE,
target: OPERATION_TARGETS.SUBBLOCK,
payload: { blockId, subblockId: subBlockId, value: newValue },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
}
)
}
}
})
)
return { success: true }
},
[executeQueuedOperation, workflowStore, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockEnabled = useCallback(
(id: string) => {
executeQueuedOperation('toggle-enabled', 'block', { id }, () =>
const collaborativeBatchToggleBlockEnabled = useCallback(
(ids: string[]) => {
if (ids.length === 0) return
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
for (const id of ids) {
const block = workflowStore.blocks[id]
if (block) {
previousStates[id] = block.enabled
validIds.push(id)
}
}
if (validIds.length === 0) return
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds: validIds, previousStates },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
for (const id of validIds) {
workflowStore.toggleBlockEnabled(id)
}
undoRedo.recordBatchToggleEnabled(validIds, previousStates)
},
[addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo]
)
const collaborativeUpdateParentId = useCallback(
(id: string, parentId: string, extent: 'parent') => {
executeQueuedOperation(
BLOCK_OPERATIONS.UPDATE_PARENT,
OPERATION_TARGETS.BLOCK,
{ id, parentId, extent },
() => workflowStore.updateParentId(id, parentId, extent)
)
},
[executeQueuedOperation, workflowStore]
)
const collaborativeUpdateParentId = useCallback(
(id: string, parentId: string, extent: 'parent') => {
executeQueuedOperation('update-parent', 'block', { id, parentId, extent }, () =>
workflowStore.updateParentId(id, parentId, extent)
)
const collaborativeBatchUpdateParent = useCallback(
(
updates: Array<{
blockId: string
newParentId: string | null
newPosition: { x: number; y: number }
affectedEdges: Edge[]
}>
) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
return
}
if (updates.length === 0) return
const batchUpdates = updates.map((u) => {
const block = workflowStore.blocks[u.blockId]
const oldParentId = block?.data?.parentId
const oldPosition = block?.position || { x: 0, y: 0 }
return {
blockId: u.blockId,
oldParentId,
newParentId: u.newParentId || undefined,
oldPosition,
newPosition: u.newPosition,
affectedEdges: u.affectedEdges,
}
})
for (const update of updates) {
if (update.affectedEdges.length > 0) {
update.affectedEdges.forEach((e) => workflowStore.removeEdge(e.id))
}
workflowStore.updateBlockPosition(update.blockId, update.newPosition)
if (update.newParentId) {
workflowStore.updateParentId(update.blockId, update.newParentId, 'parent')
}
}
undoRedo.recordBatchUpdateParent(batchUpdates)
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT,
target: OPERATION_TARGETS.BLOCKS,
payload: {
updates: batchUpdates.map((u) => ({
id: u.blockId,
parentId: u.newParentId || '',
position: u.newPosition,
})),
},
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
[executeQueuedOperation, workflowStore]
[isInActiveRoom, workflowStore, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -848,8 +1024,8 @@ export function useCollaborativeWorkflow() {
const newAdvancedMode = !currentBlock.advancedMode
executeQueuedOperation(
'update-advanced-mode',
'block',
BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
OPERATION_TARGETS.BLOCK,
{ id, advancedMode: newAdvancedMode },
() => workflowStore.toggleBlockAdvancedMode(id)
)
@@ -879,8 +1055,8 @@ export function useCollaborativeWorkflow() {
}
executeQueuedOperation(
'update-trigger-mode',
'block',
BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE,
OPERATION_TARGETS.BLOCK,
{ id, triggerMode: newTriggerMode },
() => workflowStore.toggleBlockTriggerMode(id)
)
@@ -888,27 +1064,50 @@ export function useCollaborativeWorkflow() {
[executeQueuedOperation, workflowStore]
)
const collaborativeToggleBlockHandles = useCallback(
(id: string) => {
const currentBlock = workflowStore.blocks[id]
if (!currentBlock) return
const collaborativeBatchToggleBlockHandles = useCallback(
(ids: string[]) => {
if (ids.length === 0) return
const newHorizontalHandles = !currentBlock.horizontalHandles
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
executeQueuedOperation(
'toggle-handles',
'block',
{ id, horizontalHandles: newHorizontalHandles },
() => workflowStore.toggleBlockHandles(id)
)
for (const id of ids) {
const block = workflowStore.blocks[id]
if (block) {
previousStates[id] = block.horizontalHandles ?? false
validIds.push(id)
}
}
if (validIds.length === 0) return
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds: validIds, previousStates },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
for (const id of validIds) {
workflowStore.toggleBlockHandles(id)
}
undoRedo.recordBatchToggleHandles(validIds, previousStates)
},
[executeQueuedOperation, workflowStore]
[addToQueue, activeWorkflowId, session?.user?.id, workflowStore, undoRedo]
)
const collaborativeAddEdge = useCallback(
(edge: Edge) => {
executeQueuedOperation('add', 'edge', edge, () => workflowStore.addEdge(edge))
// Only record edge addition if it's not part of a parent update operation
executeQueuedOperation(EDGE_OPERATIONS.ADD, OPERATION_TARGETS.EDGE, edge, () =>
workflowStore.addEdge(edge)
)
if (!skipEdgeRecording.current) {
undoRedo.recordAddEdge(edge.id)
}
@@ -920,13 +1119,11 @@ export function useCollaborativeWorkflow() {
(edgeId: string) => {
const edge = workflowStore.edges.find((e) => e.id === edgeId)
// Skip if edge doesn't exist (already removed during cascade deletion)
if (!edge) {
logger.debug('Edge already removed, skipping operation', { edgeId })
return
}
// Check if the edge's source and target blocks still exist
const sourceExists = workflowStore.blocks[edge.source]
const targetExists = workflowStore.blocks[edge.target]
@@ -939,23 +1136,75 @@ export function useCollaborativeWorkflow() {
return
}
// Only record edge removal if it's not part of a parent update operation
if (!skipEdgeRecording.current) {
undoRedo.recordRemoveEdge(edgeId, edge)
undoRedo.recordBatchRemoveEdges([edge])
}
executeQueuedOperation('remove', 'edge', { id: edgeId }, () =>
executeQueuedOperation(EDGE_OPERATIONS.REMOVE, OPERATION_TARGETS.EDGE, { id: edgeId }, () =>
workflowStore.removeEdge(edgeId)
)
},
[executeQueuedOperation, workflowStore, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
(edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
return false
}
if (edgeIds.length === 0) return false
const edgeSnapshots: Edge[] = []
const validEdgeIds: string[] = []
for (const edgeId of edgeIds) {
const edge = workflowStore.edges.find((e) => e.id === edgeId)
if (edge) {
const sourceExists = workflowStore.blocks[edge.source]
const targetExists = workflowStore.blocks[edge.target]
if (sourceExists && targetExists) {
edgeSnapshots.push(edge)
validEdgeIds.push(edgeId)
}
}
}
if (validEdgeIds.length === 0) {
logger.debug('No valid edges to remove')
return false
}
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: EDGES_OPERATIONS.BATCH_REMOVE_EDGES,
target: OPERATION_TARGETS.EDGES,
payload: { ids: validEdgeIds },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
validEdgeIds.forEach((id) => workflowStore.removeEdge(id))
if (!options?.skipUndoRedo && edgeSnapshots.length > 0) {
undoRedo.recordBatchRemoveEdges(edgeSnapshots)
}
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
[isInActiveRoom, workflowStore, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
(blockId: string, subblockId: string, value: any, options?: { _visited?: Set<string> }) => {
if (isApplyingRemoteChange.current) return
// Skip socket operations when viewing baseline diff
if (isBaselineDiffView) {
logger.debug('Skipping collaborative subblock update while viewing baseline diff')
return
@@ -971,28 +1220,23 @@ export function useCollaborativeWorkflow() {
return
}
// Generate operation ID for queue tracking
const operationId = crypto.randomUUID()
// Get fresh activeWorkflowId from store to avoid stale closure
const currentActiveWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
// Add to queue for retry mechanism
addToQueue({
id: operationId,
operation: {
operation: 'subblock-update',
target: 'subblock',
operation: SUBBLOCK_OPERATIONS.UPDATE,
target: OPERATION_TARGETS.SUBBLOCK,
payload: { blockId, subblockId, value },
},
workflowId: currentActiveWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
// Apply locally first (immediate UI feedback)
subBlockStore.setValue(blockId, subblockId, value)
// Declarative clearing: clear sub-blocks that depend on this subblockId
try {
const visited = options?._visited || new Set<string>()
if (visited.has(subblockId)) return
@@ -1004,9 +1248,7 @@ export function useCollaborativeWorkflow() {
(sb: any) => Array.isArray(sb.dependsOn) && sb.dependsOn.includes(subblockId)
)
for (const dep of dependents) {
// Skip clearing if the dependent is the same field
if (!dep?.id || dep.id === subblockId) continue
// Cascade using the same collaborative path so it emits and further cascades
collaborativeSetSubblockValue(blockId, dep.id, '', { _visited: visited })
}
}
@@ -1049,8 +1291,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
operation: 'subblock-update',
target: 'subblock',
operation: SUBBLOCK_OPERATIONS.UPDATE,
target: OPERATION_TARGETS.SUBBLOCK,
payload: { blockId, subblockId, value },
},
workflowId: activeWorkflowId || '',
@@ -1096,12 +1338,17 @@ export function useCollaborativeWorkflow() {
doWhileCondition: existingDoWhileCondition ?? '',
}
executeQueuedOperation('update', 'subflow', { id: loopId, type: 'loop', config }, () => {
workflowStore.updateLoopType(loopId, loopType)
workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '')
workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '')
workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '')
})
executeQueuedOperation(
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: loopId, type: 'loop', config },
() => {
workflowStore.updateLoopType(loopId, loopType)
workflowStore.setLoopForEachItems(loopId, existingForEachItems ?? '')
workflowStore.setLoopWhileCondition(loopId, existingWhileCondition ?? '')
workflowStore.setLoopDoWhileCondition(loopId, existingDoWhileCondition ?? '')
}
)
},
[executeQueuedOperation, workflowStore]
)
@@ -1134,8 +1381,8 @@ export function useCollaborativeWorkflow() {
}
executeQueuedOperation(
'update',
'subflow',
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: parallelId, type: 'parallel', config },
() => {
workflowStore.updateParallelType(parallelId, parallelType)
@@ -1169,8 +1416,11 @@ export function useCollaborativeWorkflow() {
forEachItems: currentCollection,
}
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () =>
workflowStore.updateLoopCount(nodeId, count)
executeQueuedOperation(
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: nodeId, type: 'loop', config },
() => workflowStore.updateLoopCount(nodeId, count)
)
} else {
const currentDistribution = currentBlock.data?.collection || ''
@@ -1184,8 +1434,11 @@ export function useCollaborativeWorkflow() {
parallelType: currentParallelType,
}
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
workflowStore.updateParallelCount(nodeId, count)
executeQueuedOperation(
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: nodeId, type: 'parallel', config },
() => workflowStore.updateParallelCount(nodeId, count)
)
}
},
@@ -1230,11 +1483,16 @@ export function useCollaborativeWorkflow() {
doWhileCondition: nextDoWhileCondition ?? '',
}
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'loop', config }, () => {
workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '')
workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '')
workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '')
})
executeQueuedOperation(
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: nodeId, type: 'loop', config },
() => {
workflowStore.setLoopForEachItems(nodeId, nextForEachItems ?? '')
workflowStore.setLoopWhileCondition(nodeId, nextWhileCondition ?? '')
workflowStore.setLoopDoWhileCondition(nodeId, nextDoWhileCondition ?? '')
}
)
} else {
const currentCount = currentBlock.data?.count || 5
const currentParallelType = currentBlock.data?.parallelType || 'count'
@@ -1247,8 +1505,11 @@ export function useCollaborativeWorkflow() {
parallelType: currentParallelType,
}
executeQueuedOperation('update', 'subflow', { id: nodeId, type: 'parallel', config }, () =>
workflowStore.updateParallelCollection(nodeId, collection)
executeQueuedOperation(
SUBFLOW_OPERATIONS.UPDATE,
OPERATION_TARGETS.SUBFLOW,
{ id: nodeId, type: 'parallel', config },
() => workflowStore.updateParallelCollection(nodeId, collection)
)
}
},
@@ -1257,15 +1518,20 @@ export function useCollaborativeWorkflow() {
const collaborativeUpdateVariable = useCallback(
(variableId: string, field: 'name' | 'value' | 'type', value: any) => {
executeQueuedOperation('variable-update', 'variable', { variableId, field, value }, () => {
if (field === 'name') {
variablesStore.updateVariable(variableId, { name: value })
} else if (field === 'value') {
variablesStore.updateVariable(variableId, { value })
} else if (field === 'type') {
variablesStore.updateVariable(variableId, { type: value })
executeQueuedOperation(
VARIABLE_OPERATIONS.UPDATE,
OPERATION_TARGETS.VARIABLE,
{ variableId, field, value },
() => {
if (field === 'name') {
variablesStore.updateVariable(variableId, { name: value })
} else if (field === 'value') {
variablesStore.updateVariable(variableId, { value })
} else if (field === 'type') {
variablesStore.updateVariable(variableId, { type: value })
}
}
})
)
},
[executeQueuedOperation, variablesStore]
)
@@ -1287,7 +1553,12 @@ export function useCollaborativeWorkflow() {
// Queue operation with processed name for server & other clients
// Empty callback because local store is already updated above
executeQueuedOperation('add', 'variable', payloadWithProcessedName, () => {})
executeQueuedOperation(
VARIABLE_OPERATIONS.ADD,
OPERATION_TARGETS.VARIABLE,
payloadWithProcessedName,
() => {}
)
}
return id
@@ -1299,9 +1570,14 @@ export function useCollaborativeWorkflow() {
(variableId: string) => {
cancelOperationsForVariable(variableId)
executeQueuedOperation('remove', 'variable', { variableId }, () => {
variablesStore.deleteVariable(variableId)
})
executeQueuedOperation(
VARIABLE_OPERATIONS.REMOVE,
OPERATION_TARGETS.VARIABLE,
{ variableId },
() => {
variablesStore.deleteVariable(variableId)
}
)
},
[executeQueuedOperation, variablesStore, cancelOperationsForVariable]
)
@@ -1337,8 +1613,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
operation: 'batch-add-blocks',
target: 'blocks',
operation: BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS,
target: OPERATION_TARGETS.BLOCKS,
payload: { blocks, edges, loops, parallels, subBlockValues },
},
workflowId: activeWorkflowId || '',
@@ -1469,8 +1745,8 @@ export function useCollaborativeWorkflow() {
addToQueue({
id: operationId,
operation: {
operation: 'batch-remove-blocks',
target: 'blocks',
operation: BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS,
target: OPERATION_TARGETS.BLOCKS,
payload: { ids: Array.from(allBlocksToRemove) },
},
workflowId: activeWorkflowId || '',
@@ -1512,15 +1788,17 @@ export function useCollaborativeWorkflow() {
// Collaborative operations
collaborativeBatchUpdatePositions,
collaborativeUpdateBlockName,
collaborativeToggleBlockEnabled,
collaborativeBatchToggleBlockEnabled,
collaborativeUpdateParentId,
collaborativeBatchUpdateParent,
collaborativeToggleBlockAdvancedMode,
collaborativeToggleBlockTriggerMode,
collaborativeToggleBlockHandles,
collaborativeBatchToggleBlockHandles,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeAddEdge,
collaborativeRemoveEdge,
collaborativeBatchRemoveEdges,
collaborativeSetSubblockValue,
collaborativeSetTagSelection,

View File

@@ -1,25 +0,0 @@
import { type MutableRefObject, useEffect, useRef } from 'react'
/**
 * A hook that handles forwarded refs and returns a mutable ref object.
 * Useful for components that need both a forwarded ref and a local ref.
 *
 * The forwarded ref is kept in sync with the local ref after each render,
 * and is cleared (set to null / called with null) when the component
 * unmounts or the forwarded ref changes — mirroring React's own behavior
 * of invoking refs with null on detach.
 *
 * @param forwardedRef The forwarded ref from React.forwardRef
 * @returns A mutable ref object that can be used locally
 */
export function useForwardedRef<T>(
  forwardedRef: React.ForwardedRef<T>
): MutableRefObject<T | null> {
  const innerRef = useRef<T | null>(null)

  useEffect(() => {
    if (!forwardedRef) return

    // Push the current local value out to the consumer's ref.
    if (typeof forwardedRef === 'function') {
      forwardedRef(innerRef.current)
    } else {
      forwardedRef.current = innerRef.current
    }

    // Clear the consumer's ref on cleanup so it never holds a stale node
    // after unmount (React calls refs with null when an element detaches).
    return () => {
      if (typeof forwardedRef === 'function') {
        forwardedRef(null)
      } else {
        forwardedRef.current = null
      }
    }
  }, [forwardedRef])

  return innerRef
}

View File

@@ -1,217 +0,0 @@
import { useCallback, useEffect, useState } from 'react'
import { createLogger } from '@sim/logger'
import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants'
const logger = createLogger('useSubscriptionState')
interface UsageData {
current: number
limit: number
percentUsed: number
isWarning: boolean
isExceeded: boolean
billingPeriodStart: Date | null
billingPeriodEnd: Date | null
lastPeriodCost: number
}
interface SubscriptionState {
isPaid: boolean
isPro: boolean
isTeam: boolean
isEnterprise: boolean
plan: string
status: string | null
seats: number | null
metadata: any | null
usage: UsageData
}
/**
 * Consolidated hook for subscription state management
 * Combines subscription status, features, and usage data
 *
 * Fetches from `/api/billing?context=user` on mount. The in-flight request
 * is aborted if the component unmounts first, so state setters are never
 * invoked on an unmounted component.
 */
export function useSubscriptionState() {
  const [data, setData] = useState<SubscriptionState | null>(null)
  const [isLoading, setIsLoading] = useState(true)
  const [error, setError] = useState<Error | null>(null)

  // Optional signal lets the mount effect cancel the request on unmount;
  // manual refetch() calls pass no signal and run to completion.
  const fetchSubscriptionState = useCallback(async (signal?: AbortSignal) => {
    try {
      setIsLoading(true)
      setError(null)

      const response = await fetch('/api/billing?context=user', { signal })
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`)
      }

      const result = await response.json()
      const subscriptionData = result.data
      setData(subscriptionData)
    } catch (error) {
      // An abort means the component unmounted — not a real failure.
      if (signal?.aborted) return
      const err = error instanceof Error ? error : new Error('Failed to fetch subscription state')
      logger.error('Failed to fetch subscription state', { error })
      setError(err)
    } finally {
      if (!signal?.aborted) {
        setIsLoading(false)
      }
    }
  }, [])

  useEffect(() => {
    const controller = new AbortController()
    fetchSubscriptionState(controller.signal)
    // Abort the pending request so no state updates land after unmount.
    return () => controller.abort()
  }, [fetchSubscriptionState])

  const refetch = useCallback(() => {
    return fetchSubscriptionState()
  }, [fetchSubscriptionState])

  return {
    // Plan flags derived from the fetched state; everything defaults to free.
    subscription: {
      isPaid: data?.isPaid ?? false,
      isPro: data?.isPro ?? false,
      isTeam: data?.isTeam ?? false,
      isEnterprise: data?.isEnterprise ?? false,
      isFree: !(data?.isPaid ?? false),
      plan: data?.plan ?? 'free',
      status: data?.status,
      seats: data?.seats,
      metadata: data?.metadata,
    },
    usage: {
      current: data?.usage?.current ?? 0,
      limit: data?.usage?.limit ?? DEFAULT_FREE_CREDITS,
      percentUsed: data?.usage?.percentUsed ?? 0,
      isWarning: data?.usage?.isWarning ?? false,
      isExceeded: data?.usage?.isExceeded ?? false,
      billingPeriodStart: data?.usage?.billingPeriodStart
        ? new Date(data.usage.billingPeriodStart)
        : null,
      billingPeriodEnd: data?.usage?.billingPeriodEnd
        ? new Date(data.usage.billingPeriodEnd)
        : null,
      lastPeriodCost: data?.usage?.lastPeriodCost ?? 0,
    },
    isLoading,
    error,
    refetch,
    /** True for pro, team, or enterprise plans. */
    isAtLeastPro: () => {
      return data?.isPro || data?.isTeam || data?.isEnterprise || false
    },
    /** True for team or enterprise plans. */
    isAtLeastTeam: () => {
      return data?.isTeam || data?.isEnterprise || false
    },
    /** Free and pro plans both have a higher tier available. */
    canUpgrade: () => {
      return data?.plan === 'free' || data?.plan === 'pro'
    },
    /** Returns 'exceeded' | 'warning' | 'ok', or 'unknown' before usage loads. */
    getBillingStatus: () => {
      const usage = data?.usage
      if (!usage) return 'unknown'
      if (usage.isExceeded) return 'exceeded'
      if (usage.isWarning) return 'warning'
      return 'ok'
    },
    /** Remaining budget in the current period, floored at 0. */
    getRemainingBudget: () => {
      const usage = data?.usage
      if (!usage) return 0
      return Math.max(0, usage.limit - usage.current)
    },
    /** Whole days until the billing period ends, or null if unknown. */
    getDaysRemainingInPeriod: () => {
      const usage = data?.usage
      if (!usage?.billingPeriodEnd) return null

      const now = new Date()
      const endDate = new Date(usage.billingPeriodEnd)
      const diffTime = endDate.getTime() - now.getTime()
      const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24))
      return Math.max(0, diffDays)
    },
  }
}
/**
 * Hook for usage limit information with editing capabilities
 *
 * Reads the current limit from `/api/usage?context=user` and exposes
 * `updateLimit` to persist a new limit (followed by a refetch). The initial
 * request is aborted if the component unmounts before it settles.
 */
export function useUsageLimit() {
  // Response shape is defined server-side; kept loosely typed (any) —
  // TODO(review): share a typed contract with the usage API.
  const [data, setData] = useState<any>(null)
  const [isLoading, setIsLoading] = useState(true)
  const [error, setError] = useState<Error | null>(null)

  // Optional signal lets the mount effect cancel the request on unmount;
  // manual refetch() calls pass no signal and run to completion.
  const fetchUsageLimit = useCallback(async (signal?: AbortSignal) => {
    try {
      setIsLoading(true)
      setError(null)

      const response = await fetch('/api/usage?context=user', { signal })
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`)
      }

      const limitData = await response.json()
      setData(limitData)
    } catch (error) {
      // An abort means the component unmounted — not a real failure.
      if (signal?.aborted) return
      const err = error instanceof Error ? error : new Error('Failed to fetch usage limit')
      logger.error('Failed to fetch usage limit', { error })
      setError(err)
    } finally {
      if (!signal?.aborted) {
        setIsLoading(false)
      }
    }
  }, [])

  useEffect(() => {
    const controller = new AbortController()
    fetchUsageLimit(controller.signal)
    // Abort the pending request so no state updates land after unmount.
    return () => controller.abort()
  }, [fetchUsageLimit])

  const refetch = useCallback(() => {
    return fetchUsageLimit()
  }, [fetchUsageLimit])

  /**
   * Persists a new usage limit via PUT, then refetches on success.
   * Memoized so consumers can safely use it in effect dependency arrays.
   * @throws Error with the server-provided message when the update fails.
   */
  const updateLimit = useCallback(
    async (newLimit: number) => {
      try {
        const response = await fetch('/api/usage?context=user', {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({ limit: newLimit }),
        })

        if (!response.ok) {
          const errorData = await response.json()
          throw new Error(errorData.error || 'Failed to update usage limit')
        }

        await refetch()
        return { success: true }
      } catch (error) {
        logger.error('Failed to update usage limit', { error, newLimit })
        throw error
      }
    },
    [refetch]
  )

  return {
    currentLimit: data?.currentLimit ?? DEFAULT_FREE_CREDITS,
    canEdit: data?.canEdit ?? false,
    minimumLimit: data?.minimumLimit ?? DEFAULT_FREE_CREDITS,
    plan: data?.plan ?? 'free',
    setBy: data?.setBy,
    updatedAt: data?.updatedAt ? new Date(data.updatedAt) : null,
    updateLimit,
    isLoading,
    error,
    refetch,
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -302,7 +302,11 @@ export function useWebhookManagement({
effectiveTriggerId: string | undefined,
selectedCredentialId: string | null
): Promise<boolean> => {
const triggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
const triggerConfigRaw = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
const triggerConfig =
typeof triggerConfigRaw === 'object' && triggerConfigRaw !== null
? (triggerConfigRaw as Record<string, unknown>)
: {}
const isCredentialSet = selectedCredentialId?.startsWith(CREDENTIAL_SET_PREFIX)
const credentialSetId = isCredentialSet

View File

@@ -1,3 +1,5 @@
import { db, workflow } from '@sim/db'
import { eq } from 'drizzle-orm'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import type { ExecutionEnvironment, ExecutionTrigger, WorkflowState } from '@/lib/logs/types'
import {
@@ -34,7 +36,15 @@ export function createEnvironmentObject(
}
export async function loadWorkflowStateForExecution(workflowId: string): Promise<WorkflowState> {
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
const [normalizedData, workflowRecord] = await Promise.all([
loadWorkflowFromNormalizedTables(workflowId),
db
.select({ variables: workflow.variables })
.from(workflow)
.where(eq(workflow.id, workflowId))
.limit(1)
.then((rows) => rows[0]),
])
if (!normalizedData) {
throw new Error(
@@ -47,6 +57,7 @@ export async function loadWorkflowStateForExecution(workflowId: string): Promise
edges: normalizedData.edges || [],
loops: normalizedData.loops || {},
parallels: normalizedData.parallels || {},
variables: (workflowRecord?.variables as WorkflowState['variables']) || undefined,
}
}
@@ -65,6 +76,7 @@ export async function loadDeployedWorkflowStateForLogging(
edges: deployedData.edges || [],
loops: deployedData.loops || {},
parallels: deployedData.parallels || {},
variables: deployedData.variables as WorkflowState['variables'],
}
}

View File

@@ -105,7 +105,7 @@ describe('SnapshotService', () => {
block1: {
...baseState.blocks.block1,
// Different block state - we can change outputs to make it different
outputs: { response: { content: 'different result' } as Record<string, any> },
outputs: { response: { type: 'string', description: 'different result' } },
},
},
}
@@ -177,7 +177,7 @@ describe('SnapshotService', () => {
},
},
outputs: {
response: { content: 'Agent response' } as Record<string, any>,
response: { type: 'string', description: 'Agent response' },
},
enabled: true,
horizontalHandles: true,
@@ -211,5 +211,113 @@ describe('SnapshotService', () => {
const hash2 = service.computeStateHash(complexState)
expect(hash).toBe(hash2)
})
test('should include variables in hash computation', () => {
const stateWithVariables: WorkflowState = {
blocks: {},
edges: [],
loops: {},
parallels: {},
variables: {
'var-1': {
id: 'var-1',
name: 'apiKey',
type: 'string',
value: 'secret123',
},
},
}
const stateWithoutVariables: WorkflowState = {
blocks: {},
edges: [],
loops: {},
parallels: {},
}
const hashWith = service.computeStateHash(stateWithVariables)
const hashWithout = service.computeStateHash(stateWithoutVariables)
expect(hashWith).not.toBe(hashWithout)
})
test('should detect changes in variable values', () => {
const state1: WorkflowState = {
blocks: {},
edges: [],
loops: {},
parallels: {},
variables: {
'var-1': {
id: 'var-1',
name: 'myVar',
type: 'string',
value: 'value1',
},
},
}
const state2: WorkflowState = {
blocks: {},
edges: [],
loops: {},
parallels: {},
variables: {
'var-1': {
id: 'var-1',
name: 'myVar',
type: 'string',
value: 'value2', // Different value
},
},
}
const hash1 = service.computeStateHash(state1)
const hash2 = service.computeStateHash(state2)
expect(hash1).not.toBe(hash2)
})
test('should generate consistent hashes for states with variables', () => {
const stateWithVariables: WorkflowState = {
blocks: {
block1: {
id: 'block1',
name: 'Test',
type: 'agent',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
advancedMode: false,
height: 0,
},
},
edges: [],
loops: {},
parallels: {},
variables: {
'var-1': {
id: 'var-1',
name: 'testVar',
type: 'plain',
value: 'testValue',
},
'var-2': {
id: 'var-2',
name: 'anotherVar',
type: 'number',
value: 42,
},
},
}
const hash1 = service.computeStateHash(stateWithVariables)
const hash2 = service.computeStateHash(stateWithVariables)
expect(hash1).toBe(hash2)
expect(hash1).toHaveLength(64)
})
})
})

View File

@@ -182,11 +182,15 @@ export class SnapshotService implements ISnapshotService {
normalizedParallels[parallelId] = normalizeValue(parallel)
}
// 4. Normalize variables (if present)
const normalizedVariables = state.variables ? normalizeValue(state.variables) : undefined
return {
blocks: normalizedBlocks,
edges: normalizedEdges,
loops: normalizedLoops,
parallels: normalizedParallels,
...(normalizedVariables !== undefined && { variables: normalizedVariables }),
}
}
}

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { extractInputFormatFromBlocks, generateToolInputSchema } from './workflow-tool-schema'
const logger = createLogger('WorkflowMcpSync')
@@ -59,7 +60,7 @@ export async function syncMcpToolsForWorkflow(options: SyncOptions): Promise<voi
}
// Check if workflow has a valid start block
if (!hasValidStartBlockInState(workflowState)) {
if (!hasValidStartBlockInState(workflowState as WorkflowState | null)) {
await db.delete(workflowMcpTool).where(eq(workflowMcpTool.workflowId, workflowId))
logger.info(
`[${requestId}] Removed ${tools.length} MCP tool(s) - workflow has no start block (${context}): ${workflowId}`

View File

@@ -10,6 +10,15 @@ vi.mock('@sim/logger', () => ({
}),
}))
vi.mock('dns', () => ({
resolveMx: (
_domain: string,
callback: (err: Error | null, addresses: { exchange: string; priority: number }[]) => void
) => {
callback(null, [{ exchange: 'mail.example.com', priority: 10 }])
},
}))
describe('Email Validation', () => {
describe('validateEmail', () => {
it.concurrent('should validate a correct email', async () => {

View File

@@ -1,5 +1,8 @@
import type { BlockState, Position } from '@/stores/workflows/workflow/types'
export type { Edge } from 'reactflow'
export type { Loop, Parallel } from '@/stores/workflows/workflow/types'
export interface LayoutOptions {
horizontalSpacing?: number
verticalSpacing?: number
@@ -12,30 +15,6 @@ export interface LayoutResult {
error?: string
}
export interface Edge {
id: string
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}
export interface Loop {
id: string
nodes: string[]
iterations: number
loopType: 'for' | 'forEach' | 'while' | 'doWhile'
forEachItems?: any[] | Record<string, any> | string // Items or expression
whileCondition?: string // JS expression that evaluates to boolean
}
export interface Parallel {
id: string
nodes: string[]
count?: number
parallelType?: 'count' | 'collection'
}
export interface BlockMetrics {
width: number
height: number

View File

@@ -11,10 +11,23 @@ import {
USER_FILE_PROPERTY_TYPES,
} from '@/lib/workflows/types'
import { getBlock } from '@/blocks'
import type { BlockConfig, OutputCondition } from '@/blocks/types'
import type { BlockConfig, OutputCondition, OutputFieldDefinition } from '@/blocks/types'
import { getTrigger, isTriggerValid } from '@/triggers'
type OutputDefinition = Record<string, any>
type OutputDefinition = Record<string, OutputFieldDefinition>
interface SubBlockWithValue {
value?: unknown
}
type ConditionValue = string | number | boolean
/**
* Checks if a value is a valid primitive for condition comparison.
*/
function isConditionPrimitive(value: unknown): value is ConditionValue {
return typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean'
}
/**
* Evaluates an output condition against subBlock values.
@@ -22,7 +35,7 @@ type OutputDefinition = Record<string, any>
*/
function evaluateOutputCondition(
condition: OutputCondition,
subBlocks: Record<string, any> | undefined
subBlocks: Record<string, SubBlockWithValue> | undefined
): boolean {
if (!subBlocks) return false
@@ -30,7 +43,8 @@ function evaluateOutputCondition(
let matches: boolean
if (Array.isArray(condition.value)) {
matches = condition.value.includes(fieldValue)
// For array conditions, check if fieldValue is a valid primitive and included
matches = isConditionPrimitive(fieldValue) && condition.value.includes(fieldValue)
} else {
matches = fieldValue === condition.value
}
@@ -44,7 +58,8 @@ function evaluateOutputCondition(
let andMatches: boolean
if (Array.isArray(condition.and.value)) {
andMatches = condition.and.value.includes(andFieldValue)
andMatches =
isConditionPrimitive(andFieldValue) && condition.and.value.includes(andFieldValue)
} else {
andMatches = andFieldValue === condition.and.value
}
@@ -65,7 +80,7 @@ function evaluateOutputCondition(
*/
function filterOutputsByCondition(
outputs: OutputDefinition,
subBlocks: Record<string, any> | undefined
subBlocks: Record<string, SubBlockWithValue> | undefined
): OutputDefinition {
const filtered: OutputDefinition = {}
@@ -119,7 +134,7 @@ function hasInputFormat(blockConfig: BlockConfig): boolean {
}
function getTriggerId(
subBlocks: Record<string, any> | undefined,
subBlocks: Record<string, SubBlockWithValue> | undefined,
blockConfig: BlockConfig
): string | undefined {
const selectedTriggerIdValue = subBlocks?.selectedTriggerId?.value
@@ -136,13 +151,17 @@ function getTriggerId(
)
}
function getUnifiedStartOutputs(subBlocks: Record<string, any> | undefined): OutputDefinition {
function getUnifiedStartOutputs(
subBlocks: Record<string, SubBlockWithValue> | undefined
): OutputDefinition {
const outputs = { ...UNIFIED_START_OUTPUTS }
const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value)
return applyInputFormatFields(normalizedInputFormat, outputs)
}
function getLegacyStarterOutputs(subBlocks: Record<string, any> | undefined): OutputDefinition {
function getLegacyStarterOutputs(
subBlocks: Record<string, SubBlockWithValue> | undefined
): OutputDefinition {
const startWorkflowValue = subBlocks?.startWorkflow?.value
if (startWorkflowValue === 'chat') {
@@ -179,7 +198,7 @@ function shouldClearBaseOutputs(
function applyInputFormatToOutputs(
blockType: string,
blockConfig: BlockConfig,
subBlocks: Record<string, any> | undefined,
subBlocks: Record<string, SubBlockWithValue> | undefined,
baseOutputs: OutputDefinition
): OutputDefinition {
if (!hasInputFormat(blockConfig) || !subBlocks?.inputFormat?.value) {
@@ -203,7 +222,7 @@ function applyInputFormatToOutputs(
export function getBlockOutputs(
blockType: string,
subBlocks?: Record<string, any>,
subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): OutputDefinition {
const blockConfig = getBlock(blockType)
@@ -214,7 +233,8 @@ export function getBlockOutputs(
if (triggerId && isTriggerValid(triggerId)) {
const trigger = getTrigger(triggerId)
if (trigger.outputs) {
return trigger.outputs
// TriggerOutput is compatible with OutputFieldDefinition at runtime
return trigger.outputs as OutputDefinition
}
}
}
@@ -226,7 +246,7 @@ export function getBlockOutputs(
}
if (blockType === 'human_in_the_loop') {
const hitlOutputs: Record<string, any> = {
const hitlOutputs: OutputDefinition = {
url: { type: 'string', description: 'Resume UI URL' },
resumeEndpoint: {
type: 'string',
@@ -251,7 +271,7 @@ export function getBlockOutputs(
if (blockType === 'approval') {
// Start with only url (apiUrl commented out - not accessible as output)
const pauseResumeOutputs: Record<string, any> = {
const pauseResumeOutputs: OutputDefinition = {
url: { type: 'string', description: 'Resume UI URL' },
// apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
}
@@ -285,7 +305,7 @@ function shouldFilterReservedField(
blockType: string,
key: string,
prefix: string,
subBlocks: Record<string, any> | undefined
subBlocks: Record<string, SubBlockWithValue> | undefined
): boolean {
if (blockType !== TRIGGER_TYPES.START || prefix) {
return false
@@ -308,7 +328,7 @@ function expandFileTypeProperties(path: string): string[] {
function collectOutputPaths(
obj: OutputDefinition,
blockType: string,
subBlocks: Record<string, any> | undefined,
subBlocks: Record<string, SubBlockWithValue> | undefined,
prefix = ''
): string[] {
const paths: string[] = []
@@ -321,13 +341,14 @@ function collectOutputPaths(
}
if (value && typeof value === 'object' && 'type' in value) {
if (value.type === 'files') {
const typedValue = value as { type: unknown }
if (typedValue.type === 'files') {
paths.push(...expandFileTypeProperties(path))
} else {
paths.push(path)
}
} else if (value && typeof value === 'object' && !Array.isArray(value)) {
paths.push(...collectOutputPaths(value, blockType, subBlocks, path))
paths.push(...collectOutputPaths(value as OutputDefinition, blockType, subBlocks, path))
} else {
paths.push(path)
}
@@ -338,7 +359,7 @@ function collectOutputPaths(
export function getBlockOutputPaths(
blockType: string,
subBlocks?: Record<string, any>,
subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): string[] {
const outputs = getBlockOutputs(blockType, subBlocks, triggerMode)
@@ -351,39 +372,45 @@ function getFilePropertyType(outputs: OutputDefinition, pathParts: string[]): st
return null
}
let current: any = outputs
let current: unknown = outputs
for (const part of pathParts.slice(0, -1)) {
if (!current || typeof current !== 'object') {
return null
}
current = current[part]
current = (current as Record<string, unknown>)[part]
}
if (current && typeof current === 'object' && 'type' in current && current.type === 'files') {
if (
current &&
typeof current === 'object' &&
'type' in current &&
(current as { type: unknown }).type === 'files'
) {
return USER_FILE_PROPERTY_TYPES[lastPart as keyof typeof USER_FILE_PROPERTY_TYPES]
}
return null
}
function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): any {
let current: any = outputs
function traverseOutputPath(outputs: OutputDefinition, pathParts: string[]): unknown {
let current: unknown = outputs
for (const part of pathParts) {
if (!current || typeof current !== 'object') {
return null
}
current = current[part]
current = (current as Record<string, unknown>)[part]
}
return current
}
function extractType(value: any): string {
function extractType(value: unknown): string {
if (!value) return 'any'
if (typeof value === 'object' && 'type' in value) {
return value.type
const typeValue = (value as { type: unknown }).type
return typeof typeValue === 'string' ? typeValue : 'any'
}
return typeof value === 'string' ? value : 'any'
@@ -392,7 +419,7 @@ function extractType(value: any): string {
export function getBlockOutputType(
blockType: string,
outputPath: string,
subBlocks?: Record<string, any>,
subBlocks?: Record<string, SubBlockWithValue>,
triggerMode?: boolean
): string {
const outputs = getBlockOutputs(blockType, subBlocks, triggerMode)

View File

@@ -51,8 +51,8 @@ export function hasWorkflowChanged(
}
// 3. Build normalized representations of blocks for comparison
const normalizedCurrentBlocks: Record<string, any> = {}
const normalizedDeployedBlocks: Record<string, any> = {}
const normalizedCurrentBlocks: Record<string, unknown> = {}
const normalizedDeployedBlocks: Record<string, unknown> = {}
for (const blockId of currentBlockIds) {
const currentBlock = currentState.blocks[blockId]
@@ -120,8 +120,9 @@ export function hasWorkflowChanged(
}
// Get values with special handling for null/undefined
let currentValue = currentSubBlocks[subBlockId].value ?? null
let deployedValue = deployedSubBlocks[subBlockId].value ?? null
// Using unknown type since sanitization functions return different types
let currentValue: unknown = currentSubBlocks[subBlockId].value ?? null
let deployedValue: unknown = deployedSubBlocks[subBlockId].value ?? null
if (subBlockId === 'tools' && Array.isArray(currentValue) && Array.isArray(deployedValue)) {
currentValue = sanitizeTools(currentValue)
@@ -232,8 +233,8 @@ export function hasWorkflowChanged(
}
// 6. Compare variables
const currentVariables = normalizeVariables((currentState as any).variables)
const deployedVariables = normalizeVariables((deployedState as any).variables)
const currentVariables = normalizeVariables(currentState.variables)
const deployedVariables = normalizeVariables(deployedState.variables)
const normalizedCurrentVars = normalizeValue(
Object.fromEntries(Object.entries(currentVariables).map(([id, v]) => [id, sanitizeVariable(v)]))

View File

@@ -2,6 +2,7 @@
* Tests for workflow normalization utilities
*/
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
normalizedStringify,
normalizeEdge,
@@ -39,7 +40,7 @@ describe('Workflow Normalization Utilities', () => {
it.concurrent('should sort object keys alphabetically', () => {
const input = { zebra: 1, apple: 2, mango: 3 }
const result = normalizeValue(input)
const result = normalizeValue(input) as Record<string, unknown>
expect(Object.keys(result)).toEqual(['apple', 'mango', 'zebra'])
})
@@ -55,7 +56,10 @@ describe('Workflow Normalization Utilities', () => {
},
first: 'value',
}
const result = normalizeValue(input)
const result = normalizeValue(input) as {
first: string
outer: { z: number; a: { y: number; b: number } }
}
expect(Object.keys(result)).toEqual(['first', 'outer'])
expect(Object.keys(result.outer)).toEqual(['a', 'z'])
@@ -72,11 +76,11 @@ describe('Workflow Normalization Utilities', () => {
it.concurrent('should handle arrays with mixed types', () => {
const input = [1, 'string', { b: 2, a: 1 }, null, [3, 2, 1]]
const result = normalizeValue(input)
const result = normalizeValue(input) as unknown[]
expect(result[0]).toBe(1)
expect(result[1]).toBe('string')
expect(Object.keys(result[2])).toEqual(['a', 'b'])
expect(Object.keys(result[2] as Record<string, unknown>)).toEqual(['a', 'b'])
expect(result[3]).toBe(null)
expect(result[4]).toEqual([3, 2, 1]) // Array order preserved
})
@@ -94,7 +98,9 @@ describe('Workflow Normalization Utilities', () => {
},
},
}
const result = normalizeValue(input)
const result = normalizeValue(input) as {
level1: { level2: { level3: { level4: { z: string; a: string } } } }
}
expect(Object.keys(result.level1.level2.level3.level4)).toEqual(['a', 'z'])
})
@@ -143,7 +149,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "for" loop type', () => {
const loop = {
const loop: Loop & { extraField?: string } = {
id: 'loop1',
nodes: ['block1', 'block2'],
loopType: 'for',
@@ -164,7 +170,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "forEach" loop type', () => {
const loop = {
const loop: Loop = {
id: 'loop2',
nodes: ['block1'],
loopType: 'forEach',
@@ -183,10 +189,11 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "while" loop type', () => {
const loop = {
const loop: Loop = {
id: 'loop3',
nodes: ['block1', 'block2', 'block3'],
loopType: 'while',
iterations: 0,
whileCondition: '<block.condition> === true',
doWhileCondition: 'should-be-excluded',
}
@@ -201,10 +208,11 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "doWhile" loop type', () => {
const loop = {
const loop: Loop = {
id: 'loop4',
nodes: ['block1'],
loopType: 'doWhile',
iterations: 0,
doWhileCondition: '<counter.value> < 100',
whileCondition: 'should-be-excluded',
}
@@ -218,11 +226,11 @@ describe('Workflow Normalization Utilities', () => {
})
})
it.concurrent('should handle unknown loop type with base fields only', () => {
const loop = {
it.concurrent('should extract only relevant fields for for loop type', () => {
const loop: Loop = {
id: 'loop5',
nodes: ['block1'],
loopType: 'unknown',
loopType: 'for',
iterations: 5,
forEachItems: 'items',
}
@@ -231,7 +239,8 @@ describe('Workflow Normalization Utilities', () => {
expect(result).toEqual({
id: 'loop5',
nodes: ['block1'],
loopType: 'unknown',
loopType: 'for',
iterations: 5,
})
})
})
@@ -243,7 +252,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "count" parallel type', () => {
const parallel = {
const parallel: Parallel & { extraField?: string } = {
id: 'parallel1',
nodes: ['block1', 'block2'],
parallelType: 'count',
@@ -262,7 +271,7 @@ describe('Workflow Normalization Utilities', () => {
})
it.concurrent('should normalize "collection" parallel type', () => {
const parallel = {
const parallel: Parallel = {
id: 'parallel2',
nodes: ['block1'],
parallelType: 'collection',
@@ -279,11 +288,11 @@ describe('Workflow Normalization Utilities', () => {
})
})
it.concurrent('should handle unknown parallel type with base fields only', () => {
const parallel = {
it.concurrent('should include base fields for undefined parallel type', () => {
const parallel: Parallel = {
id: 'parallel3',
nodes: ['block1'],
parallelType: 'unknown',
parallelType: undefined,
count: 5,
distribution: 'items',
}
@@ -292,7 +301,7 @@ describe('Workflow Normalization Utilities', () => {
expect(result).toEqual({
id: 'parallel3',
nodes: ['block1'],
parallelType: 'unknown',
parallelType: undefined,
})
})
})
@@ -312,7 +321,7 @@ describe('Workflow Normalization Utilities', () => {
const tools = [
{ id: 'tool1', name: 'Search', isExpanded: true },
{ id: 'tool2', name: 'Calculator', isExpanded: false },
{ id: 'tool3', name: 'Weather' }, // No isExpanded field
{ id: 'tool3', name: 'Weather' },
]
const result = sanitizeTools(tools)
@@ -365,7 +374,7 @@ describe('Workflow Normalization Utilities', () => {
const inputFormat = [
{ id: 'input1', name: 'Name', value: 'John', collapsed: true },
{ id: 'input2', name: 'Age', value: 25, collapsed: false },
{ id: 'input3', name: 'Email' }, // No value or collapsed
{ id: 'input3', name: 'Email' },
]
const result = sanitizeInputFormat(inputFormat)

View File

@@ -3,12 +3,15 @@
* Used by both client-side signature computation and server-side comparison.
*/
import type { Edge } from 'reactflow'
import type { Loop, Parallel, Variable } from '@/stores/workflows/workflow/types'
/**
* Normalizes a value for consistent comparison by sorting object keys recursively
* @param value - The value to normalize
* @returns A normalized version of the value with sorted keys
*/
export function normalizeValue(value: any): any {
export function normalizeValue(value: unknown): unknown {
if (value === null || value === undefined || typeof value !== 'object') {
return value
}
@@ -17,9 +20,9 @@ export function normalizeValue(value: any): any {
return value.map(normalizeValue)
}
const sorted: Record<string, any> = {}
for (const key of Object.keys(value).sort()) {
sorted[key] = normalizeValue(value[key])
const sorted: Record<string, unknown> = {}
for (const key of Object.keys(value as Record<string, unknown>).sort()) {
sorted[key] = normalizeValue((value as Record<string, unknown>)[key])
}
return sorted
}
@@ -29,19 +32,30 @@ export function normalizeValue(value: any): any {
* @param value - The value to normalize and stringify
* @returns A normalized JSON string
*/
export function normalizedStringify(value: any): string {
export function normalizedStringify(value: unknown): string {
return JSON.stringify(normalizeValue(value))
}
/** Normalized loop result type with only essential fields */
interface NormalizedLoop {
id: string
nodes: string[]
loopType: Loop['loopType']
iterations?: number
forEachItems?: Loop['forEachItems']
whileCondition?: string
doWhileCondition?: string
}
/**
* Normalizes a loop configuration by extracting only the relevant fields for the loop type
* @param loop - The loop configuration object
* @returns Normalized loop with only relevant fields
*/
export function normalizeLoop(loop: any): any {
export function normalizeLoop(loop: Loop | null | undefined): NormalizedLoop | null | undefined {
if (!loop) return loop
const { id, nodes, loopType, iterations, forEachItems, whileCondition, doWhileCondition } = loop
const base: any = { id, nodes, loopType }
const base: Pick<NormalizedLoop, 'id' | 'nodes' | 'loopType'> = { id, nodes, loopType }
switch (loopType) {
case 'for':
@@ -57,15 +71,30 @@ export function normalizeLoop(loop: any): any {
}
}
/** Normalized parallel result type with only essential fields */
interface NormalizedParallel {
id: string
nodes: string[]
parallelType: Parallel['parallelType']
count?: number
distribution?: Parallel['distribution']
}
/**
* Normalizes a parallel configuration by extracting only the relevant fields for the parallel type
* @param parallel - The parallel configuration object
* @returns Normalized parallel with only relevant fields
*/
export function normalizeParallel(parallel: any): any {
export function normalizeParallel(
parallel: Parallel | null | undefined
): NormalizedParallel | null | undefined {
if (!parallel) return parallel
const { id, nodes, parallelType, count, distribution } = parallel
const base: any = { id, nodes, parallelType }
const base: Pick<NormalizedParallel, 'id' | 'nodes' | 'parallelType'> = {
id,
nodes,
parallelType,
}
switch (parallelType) {
case 'count':
@@ -77,23 +106,37 @@ export function normalizeParallel(parallel: any): any {
}
}
/** Tool configuration with optional UI-only isExpanded field */
type ToolWithExpanded = Record<string, unknown> & { isExpanded?: boolean }
/**
* Sanitizes tools array by removing UI-only fields like isExpanded
* @param tools - Array of tool configurations
* @returns Sanitized tools array
*/
export function sanitizeTools(tools: any[] | undefined): any[] {
export function sanitizeTools(tools: unknown[] | undefined): Record<string, unknown>[] {
if (!Array.isArray(tools)) return []
return tools.map(({ isExpanded, ...rest }) => rest)
return tools.map((tool) => {
if (tool && typeof tool === 'object' && !Array.isArray(tool)) {
const { isExpanded, ...rest } = tool as ToolWithExpanded
return rest
}
return tool as Record<string, unknown>
})
}
/** Variable with optional UI-only validationError field */
type VariableWithValidation = Variable & { validationError?: string }
/**
* Sanitizes a variable by removing UI-only fields like validationError
* @param variable - The variable object
* @returns Sanitized variable object
*/
export function sanitizeVariable(variable: any): any {
export function sanitizeVariable(
variable: VariableWithValidation | null | undefined
): Omit<VariableWithValidation, 'validationError'> | null | undefined {
if (!variable || typeof variable !== 'object') return variable
const { validationError, ...rest } = variable
return rest
@@ -105,21 +148,38 @@ export function sanitizeVariable(variable: any): any {
* @param variables - The variables to normalize
* @returns A normalized variables object
*/
export function normalizeVariables(variables: any): Record<string, any> {
export function normalizeVariables(variables: unknown): Record<string, Variable> {
if (!variables) return {}
if (Array.isArray(variables)) return {}
if (typeof variables !== 'object') return {}
return variables
return variables as Record<string, Variable>
}
/** Input format item with optional UI-only fields */
type InputFormatItem = Record<string, unknown> & { value?: unknown; collapsed?: boolean }
/**
* Sanitizes inputFormat array by removing UI-only fields like value and collapsed
* @param inputFormat - Array of input format configurations
* @returns Sanitized input format array
*/
export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] {
export function sanitizeInputFormat(inputFormat: unknown[] | undefined): Record<string, unknown>[] {
if (!Array.isArray(inputFormat)) return []
return inputFormat.map(({ value, collapsed, ...rest }) => rest)
return inputFormat.map((item) => {
if (item && typeof item === 'object' && !Array.isArray(item)) {
const { value, collapsed, ...rest } = item as InputFormatItem
return rest
}
return item as Record<string, unknown>
})
}
/** Normalized edge with only connection-relevant fields */
interface NormalizedEdge {
source: string
sourceHandle?: string | null
target: string
targetHandle?: string | null
}
/**
@@ -127,12 +187,7 @@ export function sanitizeInputFormat(inputFormat: any[] | undefined): any[] {
* @param edge - The edge object
* @returns Normalized edge with only connection fields
*/
export function normalizeEdge(edge: any): {
source: string
sourceHandle?: string
target: string
targetHandle?: string
} {
export function normalizeEdge(edge: Edge): NormalizedEdge {
return {
source: edge.source,
sourceHandle: edge.sourceHandle,
@@ -147,8 +202,18 @@ export function normalizeEdge(edge: any): {
* @returns Sorted array of normalized edges
*/
export function sortEdges(
edges: Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }>
): Array<{ source: string; sourceHandle?: string; target: string; targetHandle?: string }> {
edges: Array<{
source: string
sourceHandle?: string | null
target: string
targetHandle?: string | null
}>
): Array<{
source: string
sourceHandle?: string | null
target: string
targetHandle?: string | null
}> {
return [...edges].sort((a, b) =>
`${a.source}-${a.sourceHandle}-${a.target}-${a.targetHandle}`.localeCompare(
`${b.source}-${b.sourceHandle}-${b.target}-${b.targetHandle}`

View File

@@ -1,6 +1,15 @@
import { getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import type { BlockState, SubBlockState, WorkflowState } from '@/stores/workflows/workflow/types'
/** Condition type for SubBlock visibility - mirrors the inline type from blocks/types.ts */
interface SubBlockCondition {
field: string
value: string | number | boolean | Array<string | number | boolean> | undefined
not?: boolean
and?: SubBlockCondition
}
// Credential types based on actual patterns in the codebase
export enum CredentialType {
@@ -48,7 +57,9 @@ const WORKSPACE_SPECIFIC_FIELDS = new Set([
* Extract required credentials from a workflow state
* This analyzes all blocks and their subblocks to identify credential requirements
*/
export function extractRequiredCredentials(state: any): CredentialRequirement[] {
export function extractRequiredCredentials(
state: Partial<WorkflowState> | null | undefined
): CredentialRequirement[] {
const credentials: CredentialRequirement[] = []
const seen = new Set<string>()
@@ -57,7 +68,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
}
// Process each block
Object.values(state.blocks).forEach((block: any) => {
Object.values(state.blocks).forEach((block: BlockState) => {
if (!block?.type) return
const blockConfig = getBlock(block.type)
@@ -104,8 +115,8 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
})
})
// Helper to check visibility, respecting mode and conditions
function isSubBlockVisible(block: any, subBlockConfig: SubBlockConfig): boolean {
/** Helper to check visibility, respecting mode and conditions */
function isSubBlockVisible(block: BlockState, subBlockConfig: SubBlockConfig): boolean {
const mode = subBlockConfig.mode ?? 'both'
if (mode === 'trigger' && !block?.triggerMode) return false
if (mode === 'basic' && block?.advancedMode) return false
@@ -118,7 +129,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
? subBlockConfig.condition()
: subBlockConfig.condition
const evaluate = (cond: any): boolean => {
const evaluate = (cond: SubBlockCondition): boolean => {
const currentValue = block?.subBlocks?.[cond.field]?.value
const expected = cond.value
@@ -126,7 +137,7 @@ export function extractRequiredCredentials(state: any): CredentialRequirement[]
expected === undefined
? true
: Array.isArray(expected)
? expected.includes(currentValue)
? expected.includes(currentValue as string)
: currentValue === expected
if (cond.not) match = !match
@@ -161,6 +172,12 @@ function formatFieldName(fieldName: string): string {
.join(' ')
}
/** Block state with mutable subBlocks for sanitization */
interface MutableBlockState extends Omit<BlockState, 'subBlocks'> {
subBlocks: Record<string, SubBlockState | null | undefined>
data?: Record<string, unknown>
}
/**
* Remove malformed subBlocks from a block that may have been created by bugs.
* This includes subBlocks with:
@@ -168,12 +185,12 @@ function formatFieldName(fieldName: string): string {
* - Missing required `id` field
* - Type "unknown" (indicates malformed data)
*/
function removeMalformedSubBlocks(block: any): void {
function removeMalformedSubBlocks(block: MutableBlockState): void {
if (!block.subBlocks) return
const keysToRemove: string[] = []
Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
Object.entries(block.subBlocks).forEach(([key, subBlock]) => {
// Flag subBlocks with invalid keys (literal "undefined" string)
if (key === 'undefined') {
keysToRemove.push(key)
@@ -187,7 +204,8 @@ function removeMalformedSubBlocks(block: any): void {
}
// Flag subBlocks with type "unknown" (malformed data)
if (subBlock.type === 'unknown') {
// Cast to string for comparison since SubBlockType doesn't include 'unknown'
if ((subBlock.type as string) === 'unknown') {
keysToRemove.push(key)
return
}
@@ -204,6 +222,12 @@ function removeMalformedSubBlocks(block: any): void {
})
}
/** Sanitized workflow state structure */
interface SanitizedWorkflowState {
blocks?: Record<string, MutableBlockState>
[key: string]: unknown
}
/**
* Sanitize workflow state by removing all credentials and workspace-specific data
* This is used for both template creation and workflow export to ensure consistency
@@ -212,18 +236,18 @@ function removeMalformedSubBlocks(block: any): void {
* @param options - Options for sanitization behavior
*/
export function sanitizeWorkflowForSharing(
state: any,
state: Partial<WorkflowState> | null | undefined,
options: {
preserveEnvVars?: boolean // Keep {{VAR}} references for export
} = {}
): any {
const sanitized = JSON.parse(JSON.stringify(state)) // Deep clone
): SanitizedWorkflowState {
const sanitized = JSON.parse(JSON.stringify(state)) as SanitizedWorkflowState // Deep clone
if (!sanitized?.blocks) {
return sanitized
}
Object.values(sanitized.blocks).forEach((block: any) => {
Object.values(sanitized.blocks).forEach((block: MutableBlockState) => {
if (!block?.type) return
// First, remove any malformed subBlocks that may have been created by bugs
@@ -239,7 +263,7 @@ export function sanitizeWorkflowForSharing(
// Clear OAuth credentials (type: 'oauth-input')
if (subBlockConfig.type === 'oauth-input') {
block.subBlocks[subBlockConfig.id].value = null
block.subBlocks[subBlockConfig.id]!.value = null
}
// Clear secret fields (password: true)
@@ -247,24 +271,24 @@ export function sanitizeWorkflowForSharing(
// Preserve environment variable references if requested
if (
options.preserveEnvVars &&
typeof subBlock.value === 'string' &&
typeof subBlock?.value === 'string' &&
subBlock.value.startsWith('{{') &&
subBlock.value.endsWith('}}')
) {
// Keep the env var reference
} else {
block.subBlocks[subBlockConfig.id].value = null
block.subBlocks[subBlockConfig.id]!.value = null
}
}
// Clear workspace-specific selectors
else if (WORKSPACE_SPECIFIC_TYPES.has(subBlockConfig.type)) {
block.subBlocks[subBlockConfig.id].value = null
block.subBlocks[subBlockConfig.id]!.value = null
}
// Clear workspace-specific fields by ID
else if (WORKSPACE_SPECIFIC_FIELDS.has(subBlockConfig.id)) {
block.subBlocks[subBlockConfig.id].value = null
block.subBlocks[subBlockConfig.id]!.value = null
}
}
})
@@ -272,9 +296,9 @@ export function sanitizeWorkflowForSharing(
// Process subBlocks without config (fallback)
if (block.subBlocks) {
Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
Object.entries(block.subBlocks).forEach(([key, subBlock]) => {
// Clear workspace-specific fields by key name
if (WORKSPACE_SPECIFIC_FIELDS.has(key)) {
if (WORKSPACE_SPECIFIC_FIELDS.has(key) && subBlock) {
subBlock.value = null
}
})
@@ -282,14 +306,14 @@ export function sanitizeWorkflowForSharing(
// Clear data field (for backward compatibility)
if (block.data) {
Object.entries(block.data).forEach(([key, value]: [string, any]) => {
Object.entries(block.data).forEach(([key]) => {
// Clear anything that looks like credentials
if (/credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key)) {
block.data[key] = null
block.data![key] = null
}
// Clear workspace-specific data
if (WORKSPACE_SPECIFIC_FIELDS.has(key)) {
block.data[key] = null
block.data![key] = null
}
})
}
@@ -302,7 +326,9 @@ export function sanitizeWorkflowForSharing(
* Sanitize workflow state for templates (removes credentials and workspace data)
* Wrapper for backward compatibility
*/
export function sanitizeCredentials(state: any): any {
export function sanitizeCredentials(
state: Partial<WorkflowState> | null | undefined
): SanitizedWorkflowState {
return sanitizeWorkflowForSharing(state, { preserveEnvVars: false })
}
@@ -310,6 +336,8 @@ export function sanitizeCredentials(state: any): any {
* Sanitize workflow state for export (preserves env vars)
* Convenience wrapper for workflow export
*/
export function sanitizeForExport(state: any): any {
export function sanitizeForExport(
state: Partial<WorkflowState> | null | undefined
): SanitizedWorkflowState {
return sanitizeWorkflowForSharing(state, { preserveEnvVars: true })
}

View File

@@ -245,10 +245,10 @@ function computeFieldDiff(
const unchangedFields: string[] = []
// Check basic fields
const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles']
const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles'] as const
for (const field of fieldsToCheck) {
const currentValue = (currentBlock as any)[field]
const proposedValue = (proposedBlock as any)[field]
const currentValue = currentBlock[field]
const proposedValue = proposedBlock[field]
if (JSON.stringify(currentValue) !== JSON.stringify(proposedValue)) {
changedFields.push(field)
} else if (currentValue !== undefined) {
@@ -363,7 +363,7 @@ export class WorkflowDiffEngine {
}
// Call the API route to create the diff
const body: any = {
const body: Record<string, unknown> = {
jsonContent,
currentWorkflowState: mergedBaseline,
}
@@ -859,7 +859,7 @@ export class WorkflowDiffEngine {
const proposedEdgeSet = new Set<string>()
// Create edge identifiers for current state (using sim-agent format)
mergedBaseline.edges.forEach((edge: any) => {
mergedBaseline.edges.forEach((edge: Edge) => {
const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
currentEdgeSet.add(edgeId)
})
@@ -992,7 +992,7 @@ export class WorkflowDiffEngine {
}
// Call the API route to merge the diff
const body: any = {
const body: Record<string, unknown> = {
existingDiff: this.currentDiff,
jsonContent,
}

View File

@@ -5,6 +5,7 @@ import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata } from '@/executor/execution/types'
import type { ExecutionResult, StreamingExecution } from '@/executor/types'
const logger = createLogger('WorkflowExecution')
@@ -13,8 +14,8 @@ export interface ExecuteWorkflowOptions {
selectedOutputs?: string[]
isSecureMode?: boolean
workflowTriggerType?: 'api' | 'chat'
onStream?: (streamingExec: any) => Promise<void>
onBlockComplete?: (blockId: string, output: any) => Promise<void>
onStream?: (streamingExec: StreamingExecution) => Promise<void>
onBlockComplete?: (blockId: string, output: unknown) => Promise<void>
skipLoggingComplete?: boolean
}
@@ -29,11 +30,11 @@ export interface WorkflowInfo {
export async function executeWorkflow(
workflow: WorkflowInfo,
requestId: string,
input: any | undefined,
input: unknown | undefined,
actorUserId: string,
streamConfig?: ExecuteWorkflowOptions,
providedExecutionId?: string
): Promise<any> {
): Promise<ExecutionResult> {
if (!workflow.workspaceId) {
throw new Error(`Workflow ${workflow.id} has no workspaceId`)
}
@@ -71,7 +72,7 @@ export async function executeWorkflow(
callbacks: {
onStream: streamConfig?.onStream,
onBlockComplete: streamConfig?.onBlockComplete
? async (blockId: string, _blockName: string, _blockType: string, output: any) => {
? async (blockId: string, _blockName: string, _blockType: string, output: unknown) => {
await streamConfig.onBlockComplete!(blockId, output)
}
: undefined,
@@ -119,7 +120,7 @@ export async function executeWorkflow(
}
return result
} catch (error: any) {
} catch (error: unknown) {
logger.error(`[${requestId}] Workflow execution failed:`, error)
throw error
}

View File

@@ -19,8 +19,12 @@ import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { Executor } from '@/executor'
import { REFERENCE } from '@/executor/constants'
import type { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionCallbacks, IterationContext } from '@/executor/execution/types'
import type { ExecutionResult } from '@/executor/types'
import type {
ContextExtensions,
ExecutionCallbacks,
IterationContext,
} from '@/executor/execution/types'
import type { ExecutionResult, NormalizedBlockOutput } from '@/executor/types'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
@@ -41,7 +45,7 @@ export interface ExecuteWorkflowCoreOptions {
abortSignal?: AbortSignal
}
function parseVariableValueByType(value: any, type: string): any {
function parseVariableValueByType(value: unknown, type: string): unknown {
if (value === null || value === undefined) {
switch (type) {
case 'number':
@@ -262,7 +266,7 @@ export async function executeWorkflowCore(
const filteredEdges = edges
// Check if this is a resume execution before trigger resolution
const resumeFromSnapshot = (metadata as any).resumeFromSnapshot === true
const resumeFromSnapshot = metadata.resumeFromSnapshot === true
const resumePendingQueue = snapshot.state?.pendingQueue
let resolvedTriggerBlockId = triggerBlockId
@@ -321,7 +325,7 @@ export async function executeWorkflowCore(
blockId: string,
blockName: string,
blockType: string,
output: any,
output: { input?: unknown; output: NormalizedBlockOutput; executionTime: number },
iterationContext?: IterationContext
) => {
await loggingSession.onBlockComplete(blockId, blockName, blockType, output)
@@ -330,7 +334,7 @@ export async function executeWorkflowCore(
}
}
const contextExtensions: any = {
const contextExtensions: ContextExtensions = {
stream: !!onStream,
selectedOutputs,
executionId,
@@ -342,7 +346,12 @@ export async function executeWorkflowCore(
onStream,
resumeFromSnapshot,
resumePendingQueue,
remainingEdges: snapshot.state?.remainingEdges,
remainingEdges: snapshot.state?.remainingEdges?.map((edge) => ({
source: edge.source,
target: edge.target,
sourceHandle: edge.sourceHandle ?? undefined,
targetHandle: edge.targetHandle ?? undefined,
})),
dagIncomingEdges: snapshot.state?.dagIncomingEdges,
snapshotState: snapshot.state,
metadata,
@@ -363,7 +372,7 @@ export async function executeWorkflowCore(
// Convert initial workflow variables to their native types
if (workflowVariables) {
for (const [varId, variable] of Object.entries(workflowVariables)) {
const v = variable as any
const v = variable as { value?: unknown; type?: string }
if (v.value !== undefined && v.type) {
v.value = parseVariableValueByType(v.value, v.type)
}
@@ -432,18 +441,23 @@ export async function executeWorkflowCore(
})
return result
} catch (error: any) {
} catch (error: unknown) {
logger.error(`[${requestId}] Execution failed:`, error)
const executionResult = (error as any)?.executionResult
const errorWithResult = error as {
executionResult?: ExecutionResult
message?: string
stack?: string
}
const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: executionResult?.metadata?.duration || 0,
error: {
message: error.message || 'Execution failed',
stackTrace: error.stack,
message: errorWithResult?.message || 'Execution failed',
stackTrace: errorWithResult?.stack,
},
traceSpans,
})

View File

@@ -2,13 +2,14 @@ import { randomUUID } from 'crypto'
import { db } from '@sim/db'
import { pausedExecutions, resumeQueue, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, desc, eq, inArray, lt, sql } from 'drizzle-orm'
import { and, asc, desc, eq, inArray, lt, type SQL, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult, PausePoint, SerializedSnapshot } from '@/executor/types'
import type { SerializedConnection } from '@/serializer/types'
const logger = createLogger('HumanInTheLoopManager')
@@ -18,7 +19,7 @@ interface ResumeQueueEntrySummary {
parentExecutionId: string
newExecutionId: string
contextId: string
resumeInput: any
resumeInput: unknown
status: string
queuedAt: string | null
claimedAt: string | null
@@ -69,7 +70,7 @@ interface PersistPauseResultArgs {
interface EnqueueResumeArgs {
executionId: string
contextId: string
resumeInput: any
resumeInput: unknown
userId: string
}
@@ -85,7 +86,7 @@ type EnqueueResumeResult =
resumeEntryId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
resumeInput: any
resumeInput: unknown
userId: string
}
@@ -94,7 +95,7 @@ interface StartResumeExecutionArgs {
resumeExecutionId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
resumeInput: any
resumeInput: unknown
userId: string
}
@@ -365,7 +366,7 @@ export class PauseResumeManager {
resumeExecutionId: string
pausedExecution: typeof pausedExecutions.$inferSelect
contextId: string
resumeInput: any
resumeInput: unknown
userId: string
}): Promise<ExecutionResult> {
const { resumeExecutionId, pausedExecution, contextId, resumeInput, userId } = args
@@ -408,9 +409,8 @@ export class PauseResumeManager {
const rawPauseBlockId = pausePoint.blockId ?? contextId
const pauseBlockId = PauseResumeManager.normalizePauseBlockId(rawPauseBlockId)
const dagIncomingEdgesFromSnapshot: Record<string, string[]> | undefined = (
baseSnapshot.state as any
)?.dagIncomingEdges
const dagIncomingEdgesFromSnapshot: Record<string, string[]> | undefined =
baseSnapshot.state?.dagIncomingEdges
const downstreamBlocks = dagIncomingEdgesFromSnapshot
? Object.entries(dagIncomingEdgesFromSnapshot)
@@ -424,9 +424,10 @@ export class PauseResumeManager {
.map(([nodeId]) => nodeId)
: baseSnapshot.workflow.connections
.filter(
(conn: any) => PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId
(conn: SerializedConnection) =>
PauseResumeManager.normalizePauseBlockId(conn.source) === pauseBlockId
)
.map((conn: any) => conn.target)
.map((conn: SerializedConnection) => conn.target)
logger.info('Found downstream blocks', {
pauseBlockId,
@@ -448,7 +449,7 @@ export class PauseResumeManager {
if (stateCopy) {
const dagIncomingEdges: Record<string, string[]> | undefined =
(stateCopy as any)?.dagIncomingEdges || dagIncomingEdgesFromSnapshot
stateCopy.dagIncomingEdges || dagIncomingEdgesFromSnapshot
// Calculate the pause duration (time from pause to resume)
const pauseDurationMs = pausedExecution.pausedAt
@@ -617,11 +618,11 @@ export class PauseResumeManager {
// If we didn't find any edges via the DAG snapshot, fall back to workflow connections
if (edgesToRemove.length === 0 && baseSnapshot.workflow.connections?.length) {
edgesToRemove = baseSnapshot.workflow.connections
.filter((conn: any) =>
.filter((conn: SerializedConnection) =>
completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source))
)
.map((conn: any) => ({
id: conn.id ?? `${conn.source}${conn.target}`,
.map((conn: SerializedConnection) => ({
id: `${conn.source}${conn.target}`,
source: conn.source,
target: conn.target,
sourceHandle: conn.sourceHandle,
@@ -630,11 +631,11 @@ export class PauseResumeManager {
}
} else {
edgesToRemove = baseSnapshot.workflow.connections
.filter((conn: any) =>
.filter((conn: SerializedConnection) =>
completedPauseContexts.has(PauseResumeManager.normalizePauseBlockId(conn.source))
)
.map((conn: any) => ({
id: conn.id ?? `${conn.source}${conn.target}`,
.map((conn: SerializedConnection) => ({
id: `${conn.source}${conn.target}`,
source: conn.source,
target: conn.target,
sourceHandle: conn.sourceHandle,
@@ -913,7 +914,7 @@ export class PauseResumeManager {
}): Promise<PausedExecutionSummary[]> {
const { workflowId, status } = options
let whereClause: any = eq(pausedExecutions.workflowId, workflowId)
let whereClause: SQL<unknown> | undefined = eq(pausedExecutions.workflowId, workflowId)
if (status) {
const statuses = Array.isArray(status)
@@ -924,7 +925,7 @@ export class PauseResumeManager {
if (statuses.length === 1) {
whereClause = and(whereClause, eq(pausedExecutions.status, statuses[0]))
} else if (statuses.length > 1) {
whereClause = and(whereClause, inArray(pausedExecutions.status, statuses as any))
whereClause = and(whereClause, inArray(pausedExecutions.status, statuses))
}
}
@@ -1129,16 +1130,16 @@ export class PauseResumeManager {
}
private static mapPausePoints(
pausePoints: any,
pausePoints: unknown,
queuePositions?: Map<string, number | null>,
latestEntries?: Map<string, ResumeQueueEntrySummary>
): PausePointWithQueue[] {
const record = pausePoints as Record<string, any>
const record = pausePoints as Record<string, PausePoint> | null
if (!record) {
return []
}
return Object.values(record).map((point: any) => {
return Object.values(record).map((point: PausePoint) => {
const queuePosition = queuePositions?.get(point.contextId ?? '') ?? null
const latestEntry = latestEntries?.get(point.contextId ?? '')

View File

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger'
import JSZip from 'jszip'
import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { Variable, WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowImportExport')
@@ -14,12 +14,7 @@ export interface WorkflowExportData {
folderId?: string | null
}
state: WorkflowState
variables?: Array<{
id: string
name: string
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
value: any
}>
variables?: Record<string, Variable>
}
export interface FolderExportData {

View File

@@ -9,7 +9,7 @@ import {
workflowSubflows,
} from '@sim/db'
import { createLogger } from '@sim/logger'
import type { InferSelectModel } from 'drizzle-orm'
import type { InferInsertModel, InferSelectModel } from 'drizzle-orm'
import { and, desc, eq, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
@@ -22,6 +22,8 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowDBHelpers')
export type WorkflowDeploymentVersion = InferSelectModel<typeof workflowDeploymentVersion>
type WebhookRecord = InferSelectModel<typeof webhook>
type SubflowInsert = InferInsertModel<typeof workflowSubflows>
export interface WorkflowDeploymentVersionResponse {
id: string
@@ -43,7 +45,7 @@ export interface NormalizedWorkflowData {
export interface DeployedWorkflowData extends NormalizedWorkflowData {
deploymentVersionId: string
variables?: Record<string, any>
variables?: Record<string, unknown>
}
export async function blockExistsInDeployment(
@@ -96,7 +98,7 @@ export async function loadDeployedWorkflowState(workflowId: string): Promise<Dep
throw new Error(`Workflow ${workflowId} has no active deployment`)
}
const state = active.state as WorkflowState & { variables?: Record<string, any> }
const state = active.state as WorkflowState & { variables?: Record<string, unknown> }
return {
blocks: state.blocks || {},
@@ -336,7 +338,7 @@ export async function saveWorkflowToNormalizedTables(
// Start a transaction
await db.transaction(async (tx) => {
// Snapshot existing webhooks before deletion to preserve them through the cycle
let existingWebhooks: any[] = []
let existingWebhooks: WebhookRecord[] = []
try {
existingWebhooks = await tx.select().from(webhook).where(eq(webhook.workflowId, workflowId))
} catch (webhookError) {
@@ -392,7 +394,7 @@ export async function saveWorkflowToNormalizedTables(
}
// Insert subflows (loops and parallels)
const subflowInserts: any[] = []
const subflowInserts: SubflowInsert[] = []
// Add loops
Object.values(canonicalLoops).forEach((loop) => {
@@ -571,7 +573,7 @@ export async function deployWorkflow(params: {
const blockTypeCounts: Record<string, number> = {}
for (const block of Object.values(currentState.blocks)) {
const blockType = (block as any).type || 'unknown'
const blockType = block.type || 'unknown'
blockTypeCounts[blockType] = (blockTypeCounts[blockType] || 0) + 1
}
@@ -605,11 +607,33 @@ export async function deployWorkflow(params: {
}
}
/** Input state for ID regeneration - partial to handle external sources */
export interface RegenerateStateInput {
blocks?: Record<string, BlockState>
edges?: Edge[]
loops?: Record<string, Loop>
parallels?: Record<string, Parallel>
lastSaved?: number
variables?: Record<string, unknown>
metadata?: Record<string, unknown>
}
/** Output state after ID regeneration */
interface RegenerateStateOutput {
blocks: Record<string, BlockState>
edges: Edge[]
loops: Record<string, Loop>
parallels: Record<string, Parallel>
lastSaved: number
variables?: Record<string, unknown>
metadata?: Record<string, unknown>
}
/**
* Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
* Returns a new state with all IDs regenerated and references updated
*/
export function regenerateWorkflowStateIds(state: any): any {
export function regenerateWorkflowStateIds(state: RegenerateStateInput): RegenerateStateOutput {
// Create ID mappings
const blockIdMapping = new Map<string, string>()
const edgeIdMapping = new Map<string, string>()
@@ -624,7 +648,7 @@ export function regenerateWorkflowStateIds(state: any): any {
// Map edge IDs
;(state.edges || []).forEach((edge: any) => {
;(state.edges || []).forEach((edge: Edge) => {
edgeIdMapping.set(edge.id, crypto.randomUUID())
})
@@ -639,28 +663,28 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Second pass: Create new state with regenerated IDs and updated references
const newBlocks: Record<string, any> = {}
const newEdges: any[] = []
const newLoops: Record<string, any> = {}
const newParallels: Record<string, any> = {}
const newBlocks: Record<string, BlockState> = {}
const newEdges: Edge[] = []
const newLoops: Record<string, Loop> = {}
const newParallels: Record<string, Parallel> = {}
// Regenerate blocks with updated references
Object.entries(state.blocks || {}).forEach(([oldId, block]: [string, any]) => {
Object.entries(state.blocks || {}).forEach(([oldId, block]) => {
const newId = blockIdMapping.get(oldId)!
const newBlock = { ...block, id: newId }
const newBlock: BlockState = { ...block, id: newId }
// Update parentId reference if it exists
if (newBlock.data?.parentId) {
const newParentId = blockIdMapping.get(newBlock.data.parentId)
if (newParentId) {
newBlock.data.parentId = newParentId
newBlock.data = { ...newBlock.data, parentId: newParentId }
}
}
// Update any block references in subBlocks
if (newBlock.subBlocks) {
const updatedSubBlocks: Record<string, any> = {}
Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]: [string, any]) => {
const updatedSubBlocks: Record<string, BlockState['subBlocks'][string]> = {}
Object.entries(newBlock.subBlocks).forEach(([subId, subBlock]) => {
const updatedSubBlock = { ...subBlock }
// If subblock value contains block references, update them
@@ -668,7 +692,7 @@ export function regenerateWorkflowStateIds(state: any): any {
typeof updatedSubBlock.value === 'string' &&
blockIdMapping.has(updatedSubBlock.value)
) {
updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value)
updatedSubBlock.value = blockIdMapping.get(updatedSubBlock.value) ?? updatedSubBlock.value
}
updatedSubBlocks[subId] = updatedSubBlock
@@ -681,7 +705,7 @@ export function regenerateWorkflowStateIds(state: any): any {
// Regenerate edges with updated source/target references
;(state.edges || []).forEach((edge: any) => {
;(state.edges || []).forEach((edge: Edge) => {
const newId = edgeIdMapping.get(edge.id)!
const newSource = blockIdMapping.get(edge.source) || edge.source
const newTarget = blockIdMapping.get(edge.target) || edge.target
@@ -695,9 +719,9 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Regenerate loops with updated node references
Object.entries(state.loops || {}).forEach(([oldId, loop]: [string, any]) => {
Object.entries(state.loops || {}).forEach(([oldId, loop]) => {
const newId = loopIdMapping.get(oldId)!
const newLoop = { ...loop, id: newId }
const newLoop: Loop = { ...loop, id: newId }
// Update nodes array with new block IDs
if (newLoop.nodes) {
@@ -708,9 +732,9 @@ export function regenerateWorkflowStateIds(state: any): any {
})
// Regenerate parallels with updated node references
Object.entries(state.parallels || {}).forEach(([oldId, parallel]: [string, any]) => {
Object.entries(state.parallels || {}).forEach(([oldId, parallel]) => {
const newId = parallelIdMapping.get(oldId)!
const newParallel = { ...parallel, id: newId }
const newParallel: Parallel = { ...parallel, id: newId }
// Update nodes array with new block IDs
if (newParallel.nodes) {

View File

@@ -59,26 +59,36 @@ export interface ExportWorkflowState {
id: string
name: string
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
value: any
value: unknown
}>
}
}
/** Condition structure for sanitization */
interface SanitizedCondition {
id: string
title: string
value: string
}
/**
* Sanitize condition blocks by removing UI-specific metadata
* Returns cleaned JSON string (not parsed array)
*/
function sanitizeConditions(conditionsJson: string): string {
try {
const conditions = JSON.parse(conditionsJson)
const conditions: unknown = JSON.parse(conditionsJson)
if (!Array.isArray(conditions)) return conditionsJson
// Keep only id, title, and value - remove UI state
const cleaned = conditions.map((cond: any) => ({
id: cond.id,
title: cond.title,
value: cond.value || '',
}))
const cleaned: SanitizedCondition[] = conditions.map((cond: unknown) => {
const condition = cond as Record<string, unknown>
return {
id: String(condition.id ?? ''),
title: String(condition.title ?? ''),
value: String(condition.value ?? ''),
}
})
return JSON.stringify(cleaned)
} catch {
@@ -86,11 +96,50 @@ function sanitizeConditions(conditionsJson: string): string {
}
}
/** Tool input structure for sanitization */
interface ToolInput {
type: string
customToolId?: string
schema?: {
type?: string
function?: {
name: string
description?: string
parameters?: unknown
}
}
code?: string
title?: string
toolId?: string
usageControl?: string
isExpanded?: boolean
[key: string]: unknown
}
/** Sanitized tool output structure */
interface SanitizedTool {
type: string
customToolId?: string
usageControl?: string
title?: string
toolId?: string
schema?: {
type: string
function: {
name: string
description?: string
parameters?: unknown
}
}
code?: string
[key: string]: unknown
}
/**
* Sanitize tools array by removing UI state and redundant fields
*/
function sanitizeTools(tools: any[]): any[] {
return tools.map((tool) => {
function sanitizeTools(tools: ToolInput[]): SanitizedTool[] {
return tools.map((tool): SanitizedTool => {
if (tool.type === 'custom-tool') {
// New reference format: minimal fields only
if (tool.customToolId && !tool.schema && !tool.code) {
@@ -102,7 +151,7 @@ function sanitizeTools(tools: any[]): any[] {
}
// Legacy inline format: include all fields
const sanitized: any = {
const sanitized: SanitizedTool = {
type: tool.type,
title: tool.title,
toolId: tool.toolId,
@@ -129,23 +178,24 @@ function sanitizeTools(tools: any[]): any[] {
return sanitized
}
const { isExpanded, ...cleanTool } = tool
return cleanTool
const { isExpanded: _isExpanded, ...cleanTool } = tool
return cleanTool as SanitizedTool
})
}
/**
* Sort object keys recursively for consistent comparison
*/
function sortKeysRecursively(item: any): any {
function sortKeysRecursively(item: unknown): unknown {
if (Array.isArray(item)) {
return item.map(sortKeysRecursively)
}
if (item !== null && typeof item === 'object') {
return Object.keys(item)
const obj = item as Record<string, unknown>
return Object.keys(obj)
.sort()
.reduce((result: any, key: string) => {
result[key] = sortKeysRecursively(item[key])
.reduce((result: Record<string, unknown>, key: string) => {
result[key] = sortKeysRecursively(obj[key])
return result
}, {})
}
@@ -183,7 +233,7 @@ function sanitizeSubBlocks(
// Sort keys for consistent comparison
if (obj && typeof obj === 'object') {
sanitized[key] = sortKeysRecursively(obj)
sanitized[key] = sortKeysRecursively(obj) as Record<string, unknown>
return
}
} catch {
@@ -201,7 +251,7 @@ function sanitizeSubBlocks(
}
if (key === 'tools' && Array.isArray(subBlock.value)) {
sanitized[key] = sanitizeTools(subBlock.value)
sanitized[key] = sanitizeTools(subBlock.value as unknown as ToolInput[])
return
}
@@ -383,7 +433,7 @@ export function sanitizeForExport(state: WorkflowState): ExportWorkflowState {
// Use unified sanitization with env var preservation for export
const sanitizedState = sanitizeWorkflowForSharing(fullState, {
preserveEnvVars: true, // Keep {{ENV_VAR}} references in exported workflows
})
}) as ExportWorkflowState['state']
return {
version: '1.0',

View File

@@ -1,20 +1,40 @@
import { createLogger } from '@sim/logger'
import { getBlock } from '@/blocks/registry'
import { isCustomTool, isMcpTool } from '@/executor/constants'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import { getTool } from '@/tools/utils'
const logger = createLogger('WorkflowValidation')
/** Tool structure for validation */
interface AgentTool {
type: string
customToolId?: string
schema?: {
type?: string
function?: {
name?: string
parameters?: {
type?: string
properties?: Record<string, unknown>
}
}
}
code?: string
usageControl?: string
[key: string]: unknown
}
/**
* Checks if a custom tool has a valid inline schema
*/
function isValidCustomToolSchema(tool: any): boolean {
function isValidCustomToolSchema(tool: unknown): boolean {
try {
if (!tool || typeof tool !== 'object') return false
if (tool.type !== 'custom-tool') return true // non-custom tools are validated elsewhere
const t = tool as AgentTool
if (t.type !== 'custom-tool') return true // non-custom tools are validated elsewhere
const schema = tool.schema
const schema = t.schema
if (!schema || typeof schema !== 'object') return false
const fn = schema.function
if (!fn || typeof fn !== 'object') return false
@@ -34,14 +54,15 @@ function isValidCustomToolSchema(tool: any): boolean {
/**
* Checks if a custom tool is a valid reference-only format (new format)
*/
function isValidCustomToolReference(tool: any): boolean {
function isValidCustomToolReference(tool: unknown): boolean {
try {
if (!tool || typeof tool !== 'object') return false
if (tool.type !== 'custom-tool') return false
const t = tool as AgentTool
if (t.type !== 'custom-tool') return false
// Reference format: has customToolId but no inline schema/code
// This is valid - the tool will be loaded dynamically during execution
if (tool.customToolId && typeof tool.customToolId === 'string') {
if (t.customToolId && typeof t.customToolId === 'string') {
return true
}
@@ -51,14 +72,14 @@ function isValidCustomToolReference(tool: any): boolean {
}
}
export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
blocks: Record<string, any>
export function sanitizeAgentToolsInBlocks(blocks: Record<string, BlockState>): {
blocks: Record<string, BlockState>
warnings: string[]
} {
const warnings: string[] = []
// Shallow clone to avoid mutating callers
const sanitizedBlocks: Record<string, any> = { ...blocks }
const sanitizedBlocks: Record<string, BlockState> = { ...blocks }
for (const [blockId, block] of Object.entries(sanitizedBlocks)) {
try {
@@ -90,10 +111,11 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
const originalLength = value.length
const cleaned = value
.filter((tool: any) => {
.filter((tool: unknown) => {
// Allow non-custom tools to pass through as-is
if (!tool || typeof tool !== 'object') return false
if (tool.type !== 'custom-tool') return true
const t = tool as AgentTool
if (t.type !== 'custom-tool') return true
// Check if it's a valid reference-only format (new format)
if (isValidCustomToolReference(tool)) {
@@ -106,21 +128,22 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
logger.warn('Removing invalid custom tool from workflow', {
blockId,
blockName: block.name,
hasCustomToolId: !!tool.customToolId,
hasSchema: !!tool.schema,
hasCustomToolId: !!t.customToolId,
hasSchema: !!t.schema,
})
}
return ok
})
.map((tool: any) => {
if (tool.type === 'custom-tool') {
.map((tool: unknown) => {
const t = tool as AgentTool
if (t.type === 'custom-tool') {
// For reference-only tools, ensure usageControl default
if (!tool.usageControl) {
tool.usageControl = 'auto'
if (!t.usageControl) {
t.usageControl = 'auto'
}
// For inline tools (legacy), also ensure code default
if (!tool.customToolId && (!tool.code || typeof tool.code !== 'string')) {
tool.code = ''
if (!t.customToolId && (!t.code || typeof t.code !== 'string')) {
t.code = ''
}
}
return tool
@@ -132,13 +155,14 @@ export function sanitizeAgentToolsInBlocks(blocks: Record<string, any>): {
)
}
toolsSubBlock.value = cleaned
// Cast cleaned to the expected SubBlockState value type
// The value is a tools array but SubBlockState.value is typed narrowly
toolsSubBlock.value = cleaned as unknown as typeof toolsSubBlock.value
// Reassign in case caller uses object identity
sanitizedBlocks[blockId] = { ...block, subBlocks: { ...subBlocks, tools: toolsSubBlock } }
} catch (err: any) {
warnings.push(
`Block ${block?.name || blockId}: tools sanitation failed: ${err?.message || String(err)}`
)
} catch (err: unknown) {
const message = err instanceof Error ? err.message : String(err)
warnings.push(`Block ${block?.name || blockId}: tools sanitation failed: ${message}`)
}
}
@@ -177,7 +201,7 @@ export function validateWorkflowState(
}
// Validate each block
const sanitizedBlocks: Record<string, any> = {}
const sanitizedBlocks: Record<string, BlockState> = {}
let hasChanges = false
for (const [blockId, block] of Object.entries(workflowState.blocks)) {

View File

@@ -8,7 +8,15 @@ import { encodeSSE } from '@/lib/core/utils/sse'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { processStreamingBlockLogs } from '@/lib/tokenization'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import type { ExecutionResult } from '@/executor/types'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
/**
* Extended streaming execution type that includes blockId on the execution.
* The runtime passes blockId but the base StreamingExecution type doesn't declare it.
*/
interface StreamingExecutionWithBlockId extends Omit<StreamingExecution, 'execution'> {
execution?: StreamingExecution['execution'] & { blockId?: string }
}
const logger = createLogger('WorkflowStreaming')
@@ -27,9 +35,9 @@ export interface StreamingResponseOptions {
userId: string
workspaceId?: string | null
isDeployed?: boolean
variables?: Record<string, any>
variables?: Record<string, unknown>
}
input: any
input: unknown
executingUserId: string
streamConfig: StreamingConfig
executionId?: string
@@ -41,7 +49,7 @@ interface StreamingState {
streamCompletionTimes: Map<string, number>
}
function extractOutputValue(output: any, path: string): any {
function extractOutputValue(output: unknown, path: string): unknown {
return traverseObjectPath(output, path)
}
@@ -54,11 +62,11 @@ function buildMinimalResult(
selectedOutputs: string[] | undefined,
streamedContent: Map<string, string>,
requestId: string
): { success: boolean; error?: string; output: Record<string, any> } {
): { success: boolean; error?: string; output: Record<string, unknown> } {
const minimalResult = {
success: result.success,
error: result.error,
output: {} as Record<string, any>,
output: {} as Record<string, unknown>,
}
if (!selectedOutputs?.length) {
@@ -88,7 +96,7 @@ function buildMinimalResult(
continue
}
const blockLog = result.logs.find((log: any) => log.blockId === blockId)
const blockLog = result.logs.find((log: BlockLog) => log.blockId === blockId)
if (!blockLog?.output) {
continue
}
@@ -99,16 +107,16 @@ function buildMinimalResult(
}
if (!minimalResult.output[blockId]) {
minimalResult.output[blockId] = Object.create(null)
minimalResult.output[blockId] = Object.create(null) as Record<string, unknown>
}
minimalResult.output[blockId][path] = value
;(minimalResult.output[blockId] as Record<string, unknown>)[path] = value
}
return minimalResult
}
function updateLogsWithStreamedContent(logs: any[], state: StreamingState): any[] {
return logs.map((log: any) => {
function updateLogsWithStreamedContent(logs: BlockLog[], state: StreamingState): BlockLog[] {
return logs.map((log: BlockLog) => {
if (!state.streamedContent.has(log.blockId)) {
return log
}
@@ -168,10 +176,10 @@ export async function createStreamingResponse(
state.processedOutputs.add(blockId)
}
const onStreamCallback = async (streamingExec: {
stream: ReadableStream
execution?: { blockId?: string }
}) => {
/**
* Callback for handling streaming execution events.
*/
const onStreamCallback = async (streamingExec: StreamingExecutionWithBlockId) => {
const blockId = streamingExec.execution?.blockId
if (!blockId) {
logger.warn(`[${requestId}] Streaming execution missing blockId`)
@@ -215,7 +223,7 @@ export async function createStreamingResponse(
}
}
const onBlockCompleteCallback = async (blockId: string, output: any) => {
const onBlockCompleteCallback = async (blockId: string, output: unknown) => {
if (!streamConfig.selectedOutputs?.length) {
return
}

View File

@@ -1,4 +1,7 @@
import type { CopilotWorkflowState } from '@/lib/workflows/sanitization/json-sanitizer'
import type {
CopilotBlockState,
CopilotWorkflowState,
} from '@/lib/workflows/sanitization/json-sanitizer'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
export interface EditOperation {
@@ -7,13 +10,12 @@ export interface EditOperation {
params?: {
type?: string
name?: string
outputs?: Record<string, any>
enabled?: boolean
triggerMode?: boolean
advancedMode?: boolean
inputs?: Record<string, any>
connections?: Record<string, any>
nestedNodes?: Record<string, any>
inputs?: Record<string, unknown>
connections?: Record<string, unknown>
nestedNodes?: Record<string, CopilotBlockState>
subflowId?: string
}
}
@@ -34,11 +36,11 @@ export interface WorkflowDiff {
* Returns map of blockId -> {block, parentId}
*/
function flattenBlocks(
blocks: Record<string, any>
): Record<string, { block: any; parentId?: string }> {
const flattened: Record<string, { block: any; parentId?: string }> = {}
blocks: Record<string, CopilotBlockState>
): Record<string, { block: CopilotBlockState; parentId?: string }> {
const flattened: Record<string, { block: CopilotBlockState; parentId?: string }> = {}
const processBlock = (blockId: string, block: any, parentId?: string) => {
const processBlock = (blockId: string, block: CopilotBlockState, parentId?: string) => {
flattened[blockId] = { block, parentId }
// Recursively process nested nodes
@@ -56,23 +58,20 @@ function flattenBlocks(
return flattened
}
/**
* Extract all edges from blocks with embedded connections (including nested)
*/
function extractAllEdgesFromBlocks(blocks: Record<string, any>): Array<{
interface ExtractedEdge {
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}> {
const edges: Array<{
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}> = []
}
const processBlockConnections = (block: any, blockId: string) => {
/**
* Extract all edges from blocks with embedded connections (including nested)
*/
function extractAllEdgesFromBlocks(blocks: Record<string, CopilotBlockState>): ExtractedEdge[] {
const edges: ExtractedEdge[] = []
const processBlockConnections = (block: CopilotBlockState, blockId: string) => {
if (block.connections) {
Object.entries(block.connections).forEach(([sourceHandle, targets]) => {
const targetArray = Array.isArray(targets) ? targets : [targets]
@@ -191,7 +190,6 @@ export function computeEditSequence(
subflowId: parentId,
type: block.type,
name: block.name,
outputs: block.outputs,
enabled: block.enabled !== undefined ? block.enabled : true,
}
@@ -296,7 +294,6 @@ export function computeEditSequence(
subflowId: endParentId,
type: endBlock.type,
name: endBlock.name,
outputs: endBlock.outputs,
enabled: endBlock.enabled !== undefined ? endBlock.enabled : true,
}
@@ -359,33 +356,22 @@ export function computeEditSequence(
* Extract input values from a block
* Works with sanitized format where inputs is Record<string, value>
*/
function extractInputValues(block: any): Record<string, any> {
function extractInputValues(block: CopilotBlockState): Record<string, unknown> {
// New sanitized format uses 'inputs' field
if (block.inputs) {
return { ...block.inputs }
}
// Fallback for any legacy data
if (block.subBlocks) {
return { ...block.subBlocks }
}
return {}
}
type ConnectionTarget = string | { block: string; handle: string }
/**
* Extract connections for a specific block from edges
*/
function extractConnections(
blockId: string,
edges: Array<{
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}>
): Record<string, any> {
const connections: Record<string, any> = {}
function extractConnections(blockId: string, edges: ExtractedEdge[]): Record<string, unknown> {
const connections: Record<string, ConnectionTarget[]> = {}
// Find all edges where this block is the source
const outgoingEdges = edges.filter((edge) => edge.source === blockId)
@@ -410,36 +396,29 @@ function extractConnections(
}
// Simplify single-element arrays to just the element
const result: Record<string, unknown> = {}
for (const handle in connections) {
if (Array.isArray(connections[handle]) && connections[handle].length === 1) {
connections[handle] = connections[handle][0]
if (connections[handle].length === 1) {
result[handle] = connections[handle][0]
} else {
result[handle] = connections[handle]
}
}
return connections
return result
}
/**
* Compute what changed in a block between two states
*/
function computeBlockChanges(
startBlock: any,
endBlock: any,
startBlock: CopilotBlockState,
endBlock: CopilotBlockState,
blockId: string,
startEdges: Array<{
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}>,
endEdges: Array<{
source: string
target: string
sourceHandle?: string | null
targetHandle?: string | null
}>
): Record<string, any> | null {
const changes: Record<string, any> = {}
startEdges: ExtractedEdge[],
endEdges: ExtractedEdge[]
): Record<string, unknown> | null {
const changes: Record<string, unknown> = {}
let hasChanges = false
// Check type change
@@ -497,10 +476,10 @@ function computeBlockChanges(
* Only returns fields that actually changed or were added
*/
function computeInputDelta(
startInputs: Record<string, any>,
endInputs: Record<string, any>
): Record<string, any> {
const delta: Record<string, any> = {}
startInputs: Record<string, unknown>,
endInputs: Record<string, unknown>
): Record<string, unknown> {
const delta: Record<string, unknown> = {}
for (const key in endInputs) {
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) {

View File

@@ -6,6 +6,7 @@ import {
} from '@/lib/workflows/triggers/triggers'
import { getAllBlocks, getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import { getTrigger } from '@/triggers'
const logger = createLogger('TriggerUtils')
@@ -34,12 +35,12 @@ export function isValidStartBlockType(blockType: string): blockType is ValidStar
/**
* Check if a workflow state has a valid start block
*/
export function hasValidStartBlockInState(state: any): boolean {
export function hasValidStartBlockInState(state: WorkflowState | null | undefined): boolean {
if (!state?.blocks) {
return false
}
const startBlock = Object.values(state.blocks).find((block: any) => {
const startBlock = Object.values(state.blocks).find((block: BlockState) => {
const blockType = block?.type
return isValidStartBlockType(blockType)
})
@@ -50,7 +51,7 @@ export function hasValidStartBlockInState(state: any): boolean {
/**
* Generates mock data based on the output type definition
*/
function generateMockValue(type: string, description?: string, fieldName?: string): any {
function generateMockValue(type: string, _description?: string, fieldName?: string): unknown {
const name = fieldName || 'value'
switch (type) {
@@ -88,18 +89,19 @@ function generateMockValue(type: string, description?: string, fieldName?: strin
/**
* Recursively processes nested output structures
*/
function processOutputField(key: string, field: any, depth = 0, maxDepth = 10): any {
function processOutputField(key: string, field: unknown, depth = 0, maxDepth = 10): unknown {
// Prevent infinite recursion
if (depth > maxDepth) {
return null
}
if (field && typeof field === 'object' && 'type' in field) {
return generateMockValue(field.type, field.description, key)
const typedField = field as { type: string; description?: string }
return generateMockValue(typedField.type, typedField.description, key)
}
if (field && typeof field === 'object' && !Array.isArray(field)) {
const nestedObject: Record<string, any> = {}
const nestedObject: Record<string, unknown> = {}
for (const [nestedKey, nestedField] of Object.entries(field)) {
nestedObject[nestedKey] = processOutputField(nestedKey, nestedField, depth + 1, maxDepth)
}
@@ -112,8 +114,8 @@ function processOutputField(key: string, field: any, depth = 0, maxDepth = 10):
/**
* Generates mock payload from outputs object
*/
function generateMockPayloadFromOutputs(outputs: Record<string, any>): Record<string, any> {
const mockPayload: Record<string, any> = {}
function generateMockPayloadFromOutputs(outputs: Record<string, unknown>): Record<string, unknown> {
const mockPayload: Record<string, unknown> = {}
for (const [key, output] of Object.entries(outputs)) {
if (key === 'visualization') {
@@ -129,8 +131,8 @@ function generateMockPayloadFromOutputs(outputs: Record<string, any>): Record<st
* Generates a mock payload based on outputs definition
*/
export function generateMockPayloadFromOutputsDefinition(
outputs: Record<string, any>
): Record<string, any> {
outputs: Record<string, unknown>
): Record<string, unknown> {
return generateMockPayloadFromOutputs(outputs)
}
@@ -395,8 +397,8 @@ export function triggerNeedsMockPayload<T extends { type: string }>(
*/
export function extractTriggerMockPayload<
T extends { type: string; subBlocks?: Record<string, unknown> },
>(trigger: StartBlockCandidate<T>): any {
const subBlocks = trigger.block.subBlocks as Record<string, any> | undefined
>(trigger: StartBlockCandidate<T>): unknown {
const subBlocks = trigger.block.subBlocks as Record<string, { value?: unknown }> | undefined
// Determine the trigger ID
let triggerId: string

View File

@@ -16,7 +16,11 @@ export class VariableManager {
* @param forExecution Whether this conversion is for execution (true) or storage/display (false)
* @returns The value converted to its appropriate type
*/
private static convertToNativeType(value: any, type: VariableType, forExecution = false): any {
private static convertToNativeType(
value: unknown,
type: VariableType,
forExecution = false
): unknown {
// Special handling for empty input values during storage
if (value === '') {
return value // Return empty string for all types during storage
@@ -38,7 +42,8 @@ export class VariableManager {
}
// Remove quotes from string values if present (used by multiple types)
const unquoted = typeof value === 'string' ? value.replace(/^["'](.*)["']$/s, '$1') : value
const unquoted: unknown =
typeof value === 'string' ? value.replace(/^["'](.*)["']$/s, '$1') : value
switch (type) {
case 'string': // Handle string type the same as plain for compatibility
@@ -117,7 +122,7 @@ export class VariableManager {
* @returns The formatted string value
*/
private static formatValue(
value: any,
value: unknown,
type: VariableType,
context: 'editor' | 'text' | 'code'
): string {
@@ -161,7 +166,7 @@ export class VariableManager {
* Parses user input and converts it to the appropriate storage format
* based on the variable type.
*/
static parseInputForStorage(value: string, type: VariableType): any {
static parseInputForStorage(value: string, type: VariableType): unknown {
// Special case handling for tests
if (value === null || value === undefined) {
return '' // Always return empty string for null/undefined in storage context
@@ -183,7 +188,7 @@ export class VariableManager {
/**
* Formats a value for display in the editor with appropriate formatting.
*/
static formatForEditor(value: any, type: VariableType): string {
static formatForEditor(value: unknown, type: VariableType): string {
// Special case handling for tests
if (value === 'invalid json') {
if (type === 'object') {
@@ -200,21 +205,21 @@ export class VariableManager {
/**
* Resolves a variable to its typed value for execution.
*/
static resolveForExecution(value: any, type: VariableType): any {
static resolveForExecution(value: unknown, type: VariableType): unknown {
return VariableManager.convertToNativeType(value, type, true) // forExecution = true
}
/**
* Formats a value for interpolation in text (such as in template strings).
*/
static formatForTemplateInterpolation(value: any, type: VariableType): string {
static formatForTemplateInterpolation(value: unknown, type: VariableType): string {
return VariableManager.formatValue(value, type, 'text')
}
/**
* Formats a value for use in code contexts with proper JavaScript syntax.
*/
static formatForCodeContext(value: any, type: VariableType): string {
static formatForCodeContext(value: unknown, type: VariableType): string {
// Special handling for null/undefined in code context
if (value === null) return 'null'
if (value === undefined) return 'undefined'

View File

@@ -70,16 +70,11 @@ async function exportWorkflow(workflowId: string, outputFile?: string): Promise<
process.exit(1)
}
// Convert variables to array format
let workflowVariables: any[] = []
if (workflowData.variables && typeof workflowData.variables === 'object') {
workflowVariables = Object.values(workflowData.variables).map((v: any) => ({
id: v.id,
name: v.name,
type: v.type,
value: v.value,
}))
}
// Get variables in Record format (as stored in database)
type VariableType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
const workflowVariables = workflowData.variables as
| Record<string, { id: string; name: string; type: VariableType; value: unknown }>
| undefined
// Prepare export state - match the exact format from the UI
const workflowState = {

View File

@@ -391,7 +391,7 @@ describe('Serializer', () => {
expect(toolsParam).toBeDefined()
// Parse tools to verify content
const tools = JSON.parse(toolsParam)
const tools = JSON.parse(toolsParam as string)
expect(tools).toHaveLength(2)
// Check custom tool

View File

@@ -218,7 +218,7 @@ export class Serializer {
position: block.position,
config: {
tool: '', // Loop blocks don't have tools
params: block.data || {}, // Preserve the block data (parallelType, count, etc.)
params: (block.data || {}) as Record<string, unknown>, // Preserve the block data (parallelType, count, etc.)
},
inputs: {},
outputs: block.outputs,

View File

@@ -1,4 +1,4 @@
import type { BlockOutput, ParamType } from '@/blocks/types'
import type { OutputFieldDefinition, ParamType } from '@/blocks/types'
import type { Position } from '@/stores/workflows/workflow/types'
export interface SerializedWorkflow {
@@ -25,10 +25,10 @@ export interface SerializedBlock {
position: Position
config: {
tool: string
params: Record<string, any>
params: Record<string, unknown>
}
inputs: Record<string, ParamType>
outputs: Record<string, BlockOutput>
outputs: Record<string, OutputFieldDefinition>
metadata?: {
id: string
name?: string

View File

@@ -0,0 +1,96 @@
/**
 * Wire-protocol operation names for mutations that target a single block.
 * Values are the literal strings sent over the socket; keys are the
 * canonical identifiers used in handler code.
 */
export const BLOCK_OPERATIONS = {
  UPDATE_POSITION: 'update-position',
  UPDATE_NAME: 'update-name',
  TOGGLE_ENABLED: 'toggle-enabled',
  UPDATE_PARENT: 'update-parent',
  UPDATE_WIDE: 'update-wide',
  UPDATE_ADVANCED_MODE: 'update-advanced-mode',
  UPDATE_TRIGGER_MODE: 'update-trigger-mode',
  TOGGLE_HANDLES: 'toggle-handles',
} as const
/** Union of the single-block operation string literals above. */
export type BlockOperation = (typeof BLOCK_OPERATIONS)[keyof typeof BLOCK_OPERATIONS]
/**
 * Wire-protocol operation names for batch mutations over multiple blocks
 * (multi-select actions: move, add, remove, toggle enabled/handles, reparent).
 */
export const BLOCKS_OPERATIONS = {
  BATCH_UPDATE_POSITIONS: 'batch-update-positions',
  BATCH_ADD_BLOCKS: 'batch-add-blocks',
  BATCH_REMOVE_BLOCKS: 'batch-remove-blocks',
  BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
  BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
  BATCH_UPDATE_PARENT: 'batch-update-parent',
} as const
/** Union of the batch block-operation string literals above. */
export type BlocksOperation = (typeof BLOCKS_OPERATIONS)[keyof typeof BLOCKS_OPERATIONS]
/** Wire-protocol operation names for single-edge mutations. */
export const EDGE_OPERATIONS = {
  ADD: 'add',
  REMOVE: 'remove',
} as const
/** Union of the single-edge operation string literals above. */
export type EdgeOperation = (typeof EDGE_OPERATIONS)[keyof typeof EDGE_OPERATIONS]
/** Wire-protocol operation names for batch edge mutations. */
export const EDGES_OPERATIONS = {
  BATCH_ADD_EDGES: 'batch-add-edges',
  BATCH_REMOVE_EDGES: 'batch-remove-edges',
} as const
/** Union of the batch edge-operation string literals above. */
export type EdgesOperation = (typeof EDGES_OPERATIONS)[keyof typeof EDGES_OPERATIONS]
/** Wire-protocol operation names for subflow (loop/parallel container) mutations. */
export const SUBFLOW_OPERATIONS = {
  ADD: 'add',
  REMOVE: 'remove',
  UPDATE: 'update',
} as const
/** Union of the subflow operation string literals above. */
export type SubflowOperation = (typeof SUBFLOW_OPERATIONS)[keyof typeof SUBFLOW_OPERATIONS]
/**
 * Operation names for workflow-variable mutations
 * (target: OPERATION_TARGETS.VARIABLE).
 * NOTE: UPDATE intentionally uses the longer 'variable-update' value
 * (unlike ADD/REMOVE) — it is an established wire string; do not "fix" it.
 */
export const VARIABLE_OPERATIONS = {
  ADD: 'add',
  REMOVE: 'remove',
  UPDATE: 'variable-update',
} as const

/** Union of variable operation strings. */
export type VariableOperation = (typeof VARIABLE_OPERATIONS)[keyof typeof VARIABLE_OPERATIONS]
/**
 * Whole-workflow operations (target: OPERATION_TARGETS.WORKFLOW).
 * REPLACE_STATE swaps the entire persisted workflow state in one shot.
 */
export const WORKFLOW_OPERATIONS = {
  REPLACE_STATE: 'replace-state',
} as const

/** Union of whole-workflow operation strings. */
export type WorkflowOperation = (typeof WORKFLOW_OPERATIONS)[keyof typeof WORKFLOW_OPERATIONS]
/**
 * Operation names for sub-block value mutations
 * (target: OPERATION_TARGETS.SUBBLOCK). Uses the prefixed
 * 'subblock-update' wire value, mirroring 'variable-update'.
 */
export const SUBBLOCK_OPERATIONS = {
  UPDATE: 'subblock-update',
} as const

/** Union of sub-block operation strings. */
export type SubblockOperation = (typeof SUBBLOCK_OPERATIONS)[keyof typeof SUBBLOCK_OPERATIONS]
/**
 * The entity kinds a socket operation can address. Each incoming operation
 * carries one of these as its `target`, which routes it to the matching
 * *_OPERATIONS handler (singular targets take single-entity ops, plural
 * targets take batch ops).
 */
export const OPERATION_TARGETS = {
  BLOCK: 'block',
  BLOCKS: 'blocks',
  EDGE: 'edge',
  EDGES: 'edges',
  SUBBLOCK: 'subblock',
  SUBFLOW: 'subflow',
  VARIABLE: 'variable',
  WORKFLOW: 'workflow',
} as const

/** Union of all operation target strings. */
export type OperationTarget = (typeof OPERATION_TARGETS)[keyof typeof OPERATION_TARGETS]
/**
 * Undo/Redo operation types (includes some socket operations + undo-specific ones).
 *
 * NOTE(review): the batch-* values duplicate the literals in BLOCKS_OPERATIONS,
 * EDGES_OPERATIONS, and BLOCK_OPERATIONS above rather than referencing them —
 * if any wire string changes, this map must be updated in lockstep.
 * BATCH_MOVE_BLOCKS and the *_DIFF entries exist only on the undo stack.
 */
export const UNDO_REDO_OPERATIONS = {
  BATCH_ADD_BLOCKS: 'batch-add-blocks',
  BATCH_REMOVE_BLOCKS: 'batch-remove-blocks',
  BATCH_ADD_EDGES: 'batch-add-edges',
  BATCH_REMOVE_EDGES: 'batch-remove-edges',
  BATCH_MOVE_BLOCKS: 'batch-move-blocks',
  UPDATE_PARENT: 'update-parent',
  BATCH_UPDATE_PARENT: 'batch-update-parent',
  BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
  BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
  APPLY_DIFF: 'apply-diff',
  ACCEPT_DIFF: 'accept-diff',
  REJECT_DIFF: 'reject-diff',
} as const

/** Union of all undo/redo operation strings. */
export type UndoRedoOperation = (typeof UNDO_REDO_OPERATIONS)[keyof typeof UNDO_REDO_OPERATIONS]

View File

@@ -7,6 +7,16 @@ import postgres from 'postgres'
import { env } from '@/lib/core/config/env'
import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import {
BLOCK_OPERATIONS,
BLOCKS_OPERATIONS,
EDGE_OPERATIONS,
EDGES_OPERATIONS,
OPERATION_TARGETS,
SUBFLOW_OPERATIONS,
VARIABLE_OPERATIONS,
WORKFLOW_OPERATIONS,
} from '@/socket/constants'
const logger = createLogger('SocketDatabase')
@@ -155,7 +165,7 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
try {
const { operation: op, target, payload, timestamp, userId } = operation
if (op === 'update-position' && Math.random() < 0.01) {
if (op === BLOCK_OPERATIONS.UPDATE_POSITION && Math.random() < 0.01) {
logger.debug('Socket DB operation sample:', {
operation: op,
target,
@@ -170,22 +180,25 @@ export async function persistWorkflowOperation(workflowId: string, operation: an
.where(eq(workflow.id, workflowId))
switch (target) {
case 'block':
case OPERATION_TARGETS.BLOCK:
await handleBlockOperationTx(tx, workflowId, op, payload)
break
case 'blocks':
case OPERATION_TARGETS.BLOCKS:
await handleBlocksOperationTx(tx, workflowId, op, payload)
break
case 'edge':
case OPERATION_TARGETS.EDGE:
await handleEdgeOperationTx(tx, workflowId, op, payload)
break
case 'subflow':
case OPERATION_TARGETS.EDGES:
await handleEdgesOperationTx(tx, workflowId, op, payload)
break
case OPERATION_TARGETS.SUBFLOW:
await handleSubflowOperationTx(tx, workflowId, op, payload)
break
case 'variable':
case OPERATION_TARGETS.VARIABLE:
await handleVariableOperationTx(tx, workflowId, op, payload)
break
case 'workflow':
case OPERATION_TARGETS.WORKFLOW:
await handleWorkflowOperationTx(tx, workflowId, op, payload)
break
default:
@@ -219,7 +232,7 @@ async function handleBlockOperationTx(
payload: any
) {
switch (operation) {
case 'update-position': {
case BLOCK_OPERATIONS.UPDATE_POSITION: {
if (!payload.id || !payload.position) {
throw new Error('Missing required fields for update position operation')
}
@@ -244,7 +257,7 @@ async function handleBlockOperationTx(
break
}
case 'update-name': {
case BLOCK_OPERATIONS.UPDATE_NAME: {
if (!payload.id || !payload.name) {
throw new Error('Missing required fields for update name operation')
}
@@ -266,7 +279,7 @@ async function handleBlockOperationTx(
break
}
case 'toggle-enabled': {
case BLOCK_OPERATIONS.TOGGLE_ENABLED: {
if (!payload.id) {
throw new Error('Missing block ID for toggle enabled operation')
}
@@ -296,7 +309,7 @@ async function handleBlockOperationTx(
break
}
case 'update-parent': {
case BLOCK_OPERATIONS.UPDATE_PARENT: {
if (!payload.id) {
throw new Error('Missing block ID for update parent operation')
}
@@ -361,7 +374,7 @@ async function handleBlockOperationTx(
break
}
case 'update-advanced-mode': {
case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE: {
if (!payload.id || payload.advancedMode === undefined) {
throw new Error('Missing required fields for update advanced mode operation')
}
@@ -383,7 +396,7 @@ async function handleBlockOperationTx(
break
}
case 'update-trigger-mode': {
case BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE: {
if (!payload.id || payload.triggerMode === undefined) {
throw new Error('Missing required fields for update trigger mode operation')
}
@@ -405,7 +418,7 @@ async function handleBlockOperationTx(
break
}
case 'toggle-handles': {
case BLOCK_OPERATIONS.TOGGLE_HANDLES: {
if (!payload.id || payload.horizontalHandles === undefined) {
throw new Error('Missing required fields for toggle handles operation')
}
@@ -442,7 +455,7 @@ async function handleBlocksOperationTx(
payload: any
) {
switch (operation) {
case 'batch-update-positions': {
case BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS: {
const { updates } = payload
if (!Array.isArray(updates) || updates.length === 0) {
return
@@ -463,7 +476,7 @@ async function handleBlocksOperationTx(
break
}
case 'batch-add-blocks': {
case BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS: {
const { blocks, edges, loops, parallels } = payload
logger.info(`Batch adding blocks to workflow ${workflowId}`, {
@@ -575,7 +588,7 @@ async function handleBlocksOperationTx(
break
}
case 'batch-remove-blocks': {
case BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS: {
const { ids } = payload
if (!Array.isArray(ids) || ids.length === 0) {
return
@@ -690,6 +703,135 @@ async function handleBlocksOperationTx(
break
}
case BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED: {
const { blockIds } = payload
if (!Array.isArray(blockIds) || blockIds.length === 0) {
return
}
logger.info(
`Batch toggling enabled state for ${blockIds.length} blocks in workflow ${workflowId}`
)
const blocks = await tx
.select({ id: workflowBlocks.id, enabled: workflowBlocks.enabled })
.from(workflowBlocks)
.where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
for (const block of blocks) {
await tx
.update(workflowBlocks)
.set({
enabled: !block.enabled,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
}
logger.debug(`Batch toggled enabled state for ${blocks.length} blocks`)
break
}
case BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES: {
const { blockIds } = payload
if (!Array.isArray(blockIds) || blockIds.length === 0) {
return
}
logger.info(`Batch toggling handles for ${blockIds.length} blocks in workflow ${workflowId}`)
const blocks = await tx
.select({ id: workflowBlocks.id, horizontalHandles: workflowBlocks.horizontalHandles })
.from(workflowBlocks)
.where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
for (const block of blocks) {
await tx
.update(workflowBlocks)
.set({
horizontalHandles: !block.horizontalHandles,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
}
logger.debug(`Batch toggled handles for ${blocks.length} blocks`)
break
}
case BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT: {
const { updates } = payload
if (!Array.isArray(updates) || updates.length === 0) {
return
}
logger.info(`Batch updating parent for ${updates.length} blocks in workflow ${workflowId}`)
for (const update of updates) {
const { id, parentId, position } = update
if (!id) continue
// Fetch current parent to update subflow node lists
const [existing] = await tx
.select({
id: workflowBlocks.id,
parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'`,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
if (!existing) {
logger.warn(`Block ${id} not found for batch-update-parent`)
continue
}
const isRemovingFromParent = !parentId
// Get current data and position
const [currentBlock] = await tx
.select({
data: workflowBlocks.data,
positionX: workflowBlocks.positionX,
positionY: workflowBlocks.positionY,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
const currentData = currentBlock?.data || {}
const updatedData = isRemovingFromParent
? {}
: {
...currentData,
...(parentId ? { parentId, extent: 'parent' } : {}),
}
await tx
.update(workflowBlocks)
.set({
positionX: position?.x ?? currentBlock?.positionX ?? 0,
positionY: position?.y ?? currentBlock?.positionY ?? 0,
data: updatedData,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
// If the block now has a parent, update the new parent's subflow node list
if (parentId) {
await updateSubflowNodeList(tx, workflowId, parentId)
}
// If the block had a previous parent, update that parent's node list as well
if (existing?.parentId && existing.parentId !== parentId) {
await updateSubflowNodeList(tx, workflowId, existing.parentId)
}
}
logger.debug(`Batch updated parent for ${updates.length} blocks`)
break
}
default:
throw new Error(`Unsupported blocks operation: ${operation}`)
}
@@ -697,7 +839,7 @@ async function handleBlocksOperationTx(
async function handleEdgeOperationTx(tx: any, workflowId: string, operation: string, payload: any) {
switch (operation) {
case 'add': {
case EDGE_OPERATIONS.ADD: {
// Validate required fields
if (!payload.id || !payload.source || !payload.target) {
throw new Error('Missing required fields for add edge operation')
@@ -716,7 +858,7 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str
break
}
case 'remove': {
case EDGE_OPERATIONS.REMOVE: {
if (!payload.id) {
throw new Error('Missing edge ID for remove operation')
}
@@ -740,6 +882,60 @@ async function handleEdgeOperationTx(tx: any, workflowId: string, operation: str
}
}
/**
 * Persists a batch edge operation (bulk add or bulk remove) for a workflow
 * inside the caller's open transaction.
 *
 * @param tx         Active drizzle transaction handle.
 * @param workflowId Workflow whose edges are being mutated.
 * @param operation  One of EDGES_OPERATIONS; anything else throws.
 * @param payload    `{ ids }` for removal, `{ edges }` for addition.
 * @throws Error when `operation` is not a recognized edges operation.
 */
async function handleEdgesOperationTx(
  tx: any,
  workflowId: string,
  operation: string,
  payload: any
) {
  if (operation === EDGES_OPERATIONS.BATCH_REMOVE_EDGES) {
    const { ids } = payload
    // An empty/malformed id list is treated as a no-op, not an error.
    if (!Array.isArray(ids) || ids.length === 0) {
      logger.debug('No edge IDs provided for batch remove')
      return
    }

    logger.info(`Batch removing ${ids.length} edges from workflow ${workflowId}`)
    await tx
      .delete(workflowEdges)
      .where(and(eq(workflowEdges.workflowId, workflowId), inArray(workflowEdges.id, ids)))
    logger.debug(`Batch removed ${ids.length} edges from workflow ${workflowId}`)
    return
  }

  if (operation === EDGES_OPERATIONS.BATCH_ADD_EDGES) {
    const { edges } = payload
    // Likewise, nothing to insert is a silent no-op.
    if (!Array.isArray(edges) || edges.length === 0) {
      logger.debug('No edges provided for batch add')
      return
    }

    logger.info(`Batch adding ${edges.length} edges to workflow ${workflowId}`)
    // Map the wire payload onto the workflowEdges row shape; missing handles
    // are normalized to null.
    const rows = edges.map((edge: Record<string, unknown>) => ({
      id: edge.id as string,
      workflowId,
      sourceBlockId: edge.source as string,
      targetBlockId: edge.target as string,
      sourceHandle: (edge.sourceHandle as string | null) || null,
      targetHandle: (edge.targetHandle as string | null) || null,
    }))
    await tx.insert(workflowEdges).values(rows)
    logger.debug(`Batch added ${edges.length} edges to workflow ${workflowId}`)
    return
  }

  logger.warn(`Unknown edges operation: ${operation}`)
  throw new Error(`Unsupported edges operation: ${operation}`)
}
async function handleSubflowOperationTx(
tx: any,
workflowId: string,
@@ -747,7 +943,7 @@ async function handleSubflowOperationTx(
payload: any
) {
switch (operation) {
case 'update': {
case SUBFLOW_OPERATIONS.UPDATE: {
if (!payload.id || !payload.config) {
throw new Error('Missing required fields for update subflow operation')
}
@@ -874,7 +1070,7 @@ async function handleVariableOperationTx(
const currentVariables = (workflowData[0].variables as Record<string, any>) || {}
switch (operation) {
case 'add': {
case VARIABLE_OPERATIONS.ADD: {
if (!payload.id || !payload.name || payload.type === undefined) {
throw new Error('Missing required fields for add variable operation')
}
@@ -903,7 +1099,7 @@ async function handleVariableOperationTx(
break
}
case 'remove': {
case VARIABLE_OPERATIONS.REMOVE: {
if (!payload.variableId) {
throw new Error('Missing variable ID for remove operation')
}
@@ -937,7 +1133,7 @@ async function handleWorkflowOperationTx(
payload: any
) {
switch (operation) {
case 'replace-state': {
case WORKFLOW_OPERATIONS.REPLACE_STATE: {
if (!payload.state) {
throw new Error('Missing state for replace-state operation')
}

View File

@@ -1,5 +1,14 @@
import { createLogger } from '@sim/logger'
import { ZodError } from 'zod'
import {
BLOCK_OPERATIONS,
BLOCKS_OPERATIONS,
EDGES_OPERATIONS,
OPERATION_TARGETS,
VARIABLE_OPERATIONS,
type VariableOperation,
WORKFLOW_OPERATIONS,
} from '@/socket/constants'
import { persistWorkflowOperation } from '@/socket/database/operations'
import type { HandlerDependencies } from '@/socket/handlers/workflow'
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
@@ -45,7 +54,8 @@ export function setupOperationsHandlers(
// For position updates, preserve client timestamp to maintain ordering
// For other operations, use server timestamp for consistency
const isPositionUpdate = operation === 'update-position' && target === 'block'
const isPositionUpdate =
operation === BLOCK_OPERATIONS.UPDATE_POSITION && target === OPERATION_TARGETS.BLOCK
const commitPositionUpdate =
isPositionUpdate && 'commit' in payload ? payload.commit === true : false
const operationTimestamp = isPositionUpdate ? timestamp : Date.now()
@@ -145,7 +155,10 @@ export function setupOperationsHandlers(
return
}
if (target === 'blocks' && operation === 'batch-update-positions') {
if (
target === OPERATION_TARGETS.BLOCKS &&
operation === BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS
) {
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
@@ -184,8 +197,12 @@ export function setupOperationsHandlers(
return
}
if (target === 'variable' && ['add', 'remove'].includes(operation)) {
// Persist first, then broadcast
if (
target === OPERATION_TARGETS.VARIABLE &&
([VARIABLE_OPERATIONS.ADD, VARIABLE_OPERATIONS.REMOVE] as VariableOperation[]).includes(
operation as VariableOperation
)
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
@@ -222,7 +239,10 @@ export function setupOperationsHandlers(
return
}
if (target === 'workflow' && operation === 'replace-state') {
if (
target === OPERATION_TARGETS.WORKFLOW &&
operation === WORKFLOW_OPERATIONS.REPLACE_STATE
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
@@ -259,7 +279,7 @@ export function setupOperationsHandlers(
return
}
if (target === 'blocks' && operation === 'batch-add-blocks') {
if (target === OPERATION_TARGETS.BLOCKS && operation === BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS) {
await persistWorkflowOperation(workflowId, {
operation,
target,
@@ -288,7 +308,164 @@ export function setupOperationsHandlers(
return
}
if (target === 'blocks' && operation === 'batch-remove-blocks') {
if (
target === OPERATION_TARGETS.BLOCKS &&
operation === BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (target === OPERATION_TARGETS.EDGES && operation === EDGES_OPERATIONS.BATCH_REMOVE_EDGES) {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (
target === OPERATION_TARGETS.BLOCKS &&
operation === BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (
target === OPERATION_TARGETS.BLOCKS &&
operation === BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (
target === OPERATION_TARGETS.BLOCKS &&
operation === BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT
) {
await persistWorkflowOperation(workflowId, {
operation,
target,
payload,
timestamp: operationTimestamp,
userId: session.userId,
})
room.lastModified = Date.now()
socket.to(workflowId).emit('workflow-operation', {
operation,
target,
payload,
timestamp: operationTimestamp,
senderId: socket.id,
userId: session.userId,
userName: session.userName,
metadata: { workflowId, operationId: crypto.randomUUID() },
})
if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
}
return
}
if (target === OPERATION_TARGETS.EDGES && operation === EDGES_OPERATIONS.BATCH_ADD_EDGES) {
await persistWorkflowOperation(workflowId, {
operation,
target,

View File

@@ -16,6 +16,11 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'batch-update-positions',
'batch-add-blocks',
'batch-remove-blocks',
'batch-add-edges',
'batch-remove-edges',
'batch-toggle-enabled',
'batch-toggle-handles',
'batch-update-parent',
'update-name',
'toggle-enabled',
'update-parent',
@@ -33,6 +38,11 @@ const ROLE_PERMISSIONS: Record<string, string[]> = {
'batch-update-positions',
'batch-add-blocks',
'batch-remove-blocks',
'batch-add-edges',
'batch-remove-edges',
'batch-toggle-enabled',
'batch-toggle-handles',
'batch-update-parent',
'update-name',
'toggle-enabled',
'update-parent',

View File

@@ -1,11 +1,20 @@
import { z } from 'zod'
import {
BLOCK_OPERATIONS,
BLOCKS_OPERATIONS,
EDGE_OPERATIONS,
EDGES_OPERATIONS,
OPERATION_TARGETS,
SUBFLOW_OPERATIONS,
VARIABLE_OPERATIONS,
WORKFLOW_OPERATIONS,
} from '@/socket/constants'
/** Shared x/y coordinate pair for a block's position on the canvas. */
const PositionSchema = z.object({
  x: z.number(),
  y: z.number(),
})
// Schema for auto-connect edge data
const AutoConnectEdgeSchema = z.object({
id: z.string(),
source: z.string(),
@@ -17,16 +26,16 @@ const AutoConnectEdgeSchema = z.object({
export const BlockOperationSchema = z.object({
operation: z.enum([
'update-position',
'update-name',
'toggle-enabled',
'update-parent',
'update-wide',
'update-advanced-mode',
'update-trigger-mode',
'toggle-handles',
BLOCK_OPERATIONS.UPDATE_POSITION,
BLOCK_OPERATIONS.UPDATE_NAME,
BLOCK_OPERATIONS.TOGGLE_ENABLED,
BLOCK_OPERATIONS.UPDATE_PARENT,
BLOCK_OPERATIONS.UPDATE_WIDE,
BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
BLOCK_OPERATIONS.UPDATE_TRIGGER_MODE,
BLOCK_OPERATIONS.TOGGLE_HANDLES,
]),
target: z.literal('block'),
target: z.literal(OPERATION_TARGETS.BLOCK),
payload: z.object({
id: z.string(),
type: z.string().optional(),
@@ -49,8 +58,8 @@ export const BlockOperationSchema = z.object({
})
export const BatchPositionUpdateSchema = z.object({
operation: z.literal('batch-update-positions'),
target: z.literal('blocks'),
operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS),
target: z.literal(OPERATION_TARGETS.BLOCKS),
payload: z.object({
updates: z.array(
z.object({
@@ -64,8 +73,8 @@ export const BatchPositionUpdateSchema = z.object({
})
export const EdgeOperationSchema = z.object({
operation: z.enum(['add', 'remove']),
target: z.literal('edge'),
operation: z.enum([EDGE_OPERATIONS.ADD, EDGE_OPERATIONS.REMOVE]),
target: z.literal(OPERATION_TARGETS.EDGE),
payload: z.object({
id: z.string(),
source: z.string().optional(),
@@ -78,8 +87,8 @@ export const EdgeOperationSchema = z.object({
})
export const SubflowOperationSchema = z.object({
operation: z.enum(['add', 'remove', 'update']),
target: z.literal('subflow'),
operation: z.enum([SUBFLOW_OPERATIONS.ADD, SUBFLOW_OPERATIONS.REMOVE, SUBFLOW_OPERATIONS.UPDATE]),
target: z.literal(OPERATION_TARGETS.SUBFLOW),
payload: z.object({
id: z.string(),
type: z.enum(['loop', 'parallel']).optional(),
@@ -91,8 +100,8 @@ export const SubflowOperationSchema = z.object({
export const VariableOperationSchema = z.union([
z.object({
operation: z.literal('add'),
target: z.literal('variable'),
operation: z.literal(VARIABLE_OPERATIONS.ADD),
target: z.literal(OPERATION_TARGETS.VARIABLE),
payload: z.object({
id: z.string(),
name: z.string(),
@@ -104,8 +113,8 @@ export const VariableOperationSchema = z.union([
operationId: z.string().optional(),
}),
z.object({
operation: z.literal('remove'),
target: z.literal('variable'),
operation: z.literal(VARIABLE_OPERATIONS.REMOVE),
target: z.literal(OPERATION_TARGETS.VARIABLE),
payload: z.object({
variableId: z.string(),
}),
@@ -115,8 +124,8 @@ export const VariableOperationSchema = z.union([
])
export const WorkflowStateOperationSchema = z.object({
operation: z.literal('replace-state'),
target: z.literal('workflow'),
operation: z.literal(WORKFLOW_OPERATIONS.REPLACE_STATE),
target: z.literal(OPERATION_TARGETS.WORKFLOW),
payload: z.object({
state: z.any(),
}),
@@ -125,8 +134,8 @@ export const WorkflowStateOperationSchema = z.object({
})
export const BatchAddBlocksSchema = z.object({
operation: z.literal('batch-add-blocks'),
target: z.literal('blocks'),
operation: z.literal(BLOCKS_OPERATIONS.BATCH_ADD_BLOCKS),
target: z.literal(OPERATION_TARGETS.BLOCKS),
payload: z.object({
blocks: z.array(z.record(z.any())),
edges: z.array(AutoConnectEdgeSchema).optional(),
@@ -139,8 +148,8 @@ export const BatchAddBlocksSchema = z.object({
})
export const BatchRemoveBlocksSchema = z.object({
operation: z.literal('batch-remove-blocks'),
target: z.literal('blocks'),
operation: z.literal(BLOCKS_OPERATIONS.BATCH_REMOVE_BLOCKS),
target: z.literal(OPERATION_TARGETS.BLOCKS),
payload: z.object({
ids: z.array(z.string()),
}),
@@ -148,12 +157,83 @@ export const BatchRemoveBlocksSchema = z.object({
operationId: z.string().optional(),
})
/** Validates a batch edge removal: deletes every edge whose id is listed. */
export const BatchRemoveEdgesSchema = z.object({
  operation: z.literal(EDGES_OPERATIONS.BATCH_REMOVE_EDGES),
  target: z.literal(OPERATION_TARGETS.EDGES),
  payload: z.object({
    ids: z.array(z.string()),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
/**
 * Validates a batch edge insertion. Handles are nullable/optional because
 * edges may connect default (unnamed) ports.
 */
export const BatchAddEdgesSchema = z.object({
  operation: z.literal(EDGES_OPERATIONS.BATCH_ADD_EDGES),
  target: z.literal(OPERATION_TARGETS.EDGES),
  payload: z.object({
    edges: z.array(
      z.object({
        id: z.string(),
        source: z.string(),
        target: z.string(),
        sourceHandle: z.string().nullable().optional(),
        targetHandle: z.string().nullable().optional(),
      })
    ),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
/**
 * Validates a batch enabled-state toggle for the listed blocks.
 * `previousStates` maps blockId -> prior enabled flag; presumably consumed
 * by the client-side undo stack — the DB handler reads only `blockIds`.
 */
export const BatchToggleEnabledSchema = z.object({
  operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED),
  target: z.literal(OPERATION_TARGETS.BLOCKS),
  payload: z.object({
    blockIds: z.array(z.string()),
    previousStates: z.record(z.boolean()),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
/**
 * Validates a batch horizontal/vertical handle flip for the listed blocks.
 * `previousStates` maps blockId -> prior horizontalHandles flag; presumably
 * for undo/redo — the DB handler reads only `blockIds`.
 */
export const BatchToggleHandlesSchema = z.object({
  operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_HANDLES),
  target: z.literal(OPERATION_TARGETS.BLOCKS),
  payload: z.object({
    blockIds: z.array(z.string()),
    previousStates: z.record(z.boolean()),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
/**
 * Validates a batch re-parenting of blocks (moving blocks into or out of a
 * subflow container).
 *
 * `parentId` is nullable and `position` optional to match the persistence
 * handler for BATCH_UPDATE_PARENT, which treats a falsy `parentId` as
 * "detach from current parent" and falls back to the block's stored position
 * (`position?.x ?? current`) when none is supplied. The previous schema
 * required both, rejecting the removal case the server already supports.
 */
export const BatchUpdateParentSchema = z.object({
  operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT),
  target: z.literal(OPERATION_TARGETS.BLOCKS),
  payload: z.object({
    updates: z.array(
      z.object({
        id: z.string(),
        // null => remove the block from its current subflow parent
        parentId: z.string().nullable(),
        // omitted => server keeps the block's existing position
        position: PositionSchema.optional(),
      })
    ),
  }),
  timestamp: z.number(),
  operationId: z.string().optional(),
})
export const WorkflowOperationSchema = z.union([
BlockOperationSchema,
BatchPositionUpdateSchema,
BatchAddBlocksSchema,
BatchRemoveBlocksSchema,
BatchToggleEnabledSchema,
BatchToggleHandlesSchema,
BatchUpdateParentSchema,
EdgeOperationSchema,
BatchAddEdgesSchema,
BatchRemoveEdgesSchema,
SubflowOperationSchema,
VariableOperationSchema,
WorkflowStateOperationSchema,

View File

@@ -13,7 +13,7 @@ export interface Variable {
workflowId: string
name: string // Must be unique per workflow
type: VariableType
value: any
value: unknown
validationError?: string // Tracks format validation errors
}

Some files were not shown because too many files have changed in this diff Show More