mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-28 16:27:55 -05:00
Compare commits
6 Commits
feat/termi
...
improvemen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e2e86a7b19 | ||
|
|
78410eef84 | ||
|
|
655fe4f3b7 | ||
|
|
72a2f79701 | ||
|
|
2c2b485f81 | ||
|
|
01e0723a3a |
@@ -14,7 +14,7 @@
|
||||
--panel-width: 320px; /* PANEL_WIDTH.DEFAULT */
|
||||
--toolbar-triggers-height: 300px; /* TOOLBAR_TRIGGERS_HEIGHT.DEFAULT */
|
||||
--editor-connections-height: 172px; /* EDITOR_CONNECTIONS_HEIGHT.DEFAULT */
|
||||
--terminal-height: 206px; /* TERMINAL_HEIGHT.DEFAULT */
|
||||
--terminal-height: 155px; /* TERMINAL_HEIGHT.DEFAULT */
|
||||
}
|
||||
|
||||
.sidebar-container {
|
||||
|
||||
216
apps/sim/app/api/workflows/[id]/execute-from-block/route.ts
Normal file
216
apps/sim/app/api/workflows/[id]/execute-from-block/route.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { db, workflow as workflowTable } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata, SerializableExecutionState } from '@/executor/execution/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
|
||||
const logger = createLogger('ExecuteFromBlockAPI')
|
||||
|
||||
const ExecuteFromBlockSchema = z.object({
|
||||
startBlockId: z.string().min(1, 'Start block ID is required'),
|
||||
sourceSnapshot: z.object({
|
||||
blockStates: z.record(z.any()),
|
||||
executedBlocks: z.array(z.string()),
|
||||
blockLogs: z.array(z.any()),
|
||||
decisions: z.object({
|
||||
router: z.record(z.string()),
|
||||
condition: z.record(z.string()),
|
||||
}),
|
||||
completedLoops: z.array(z.string()),
|
||||
loopExecutions: z.record(z.any()).optional(),
|
||||
parallelExecutions: z.record(z.any()).optional(),
|
||||
parallelBlockMapping: z.record(z.any()).optional(),
|
||||
activeExecutionPath: z.array(z.string()),
|
||||
}),
|
||||
input: z.any().optional(),
|
||||
})
|
||||
|
||||
export const runtime = 'nodejs'
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id: workflowId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
const userId = auth.userId
|
||||
|
||||
let body: unknown
|
||||
try {
|
||||
body = await req.json()
|
||||
} catch {
|
||||
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
|
||||
}
|
||||
|
||||
const validation = ExecuteFromBlockSchema.safeParse(body)
|
||||
if (!validation.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Invalid request body',
|
||||
details: validation.error.errors.map((e) => ({
|
||||
path: e.path.join('.'),
|
||||
message: e.message,
|
||||
})),
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { startBlockId, sourceSnapshot, input } = validation.data
|
||||
const executionId = uuidv4()
|
||||
|
||||
const [workflowRecord] = await db
|
||||
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
|
||||
.from(workflowTable)
|
||||
.where(eq(workflowTable.id, workflowId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowRecord?.workspaceId) {
|
||||
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
||||
}
|
||||
|
||||
const workspaceId = workflowRecord.workspaceId
|
||||
const workflowUserId = workflowRecord.userId
|
||||
|
||||
logger.info(`[${requestId}] Starting run-from-block execution`, {
|
||||
workflowId,
|
||||
startBlockId,
|
||||
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
||||
})
|
||||
|
||||
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
||||
const abortController = new AbortController()
|
||||
let isStreamClosed = false
|
||||
|
||||
const stream = new ReadableStream<Uint8Array>({
|
||||
async start(controller) {
|
||||
const { sendEvent, onBlockStart, onBlockComplete, onStream } = createSSECallbacks({
|
||||
executionId,
|
||||
workflowId,
|
||||
controller,
|
||||
isStreamClosed: () => isStreamClosed,
|
||||
setStreamClosed: () => {
|
||||
isStreamClosed = true
|
||||
},
|
||||
})
|
||||
|
||||
const metadata: ExecutionMetadata = {
|
||||
requestId,
|
||||
workflowId,
|
||||
userId,
|
||||
executionId,
|
||||
triggerType: 'manual',
|
||||
workspaceId,
|
||||
workflowUserId,
|
||||
useDraftState: true,
|
||||
isClientSession: true,
|
||||
startTime: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const snapshot = new ExecutionSnapshot(metadata, {}, input || {}, {})
|
||||
|
||||
try {
|
||||
const startTime = new Date()
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:started',
|
||||
timestamp: startTime.toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { startTime: startTime.toISOString() },
|
||||
})
|
||||
|
||||
const result = await executeWorkflowCore({
|
||||
snapshot,
|
||||
loggingSession,
|
||||
abortSignal: abortController.signal,
|
||||
runFromBlock: {
|
||||
startBlockId,
|
||||
sourceSnapshot: sourceSnapshot as SerializableExecutionState,
|
||||
},
|
||||
callbacks: { onBlockStart, onBlockComplete, onStream },
|
||||
})
|
||||
|
||||
if (result.status === 'cancelled') {
|
||||
sendEvent({
|
||||
type: 'execution:cancelled',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { duration: result.metadata?.duration || 0 },
|
||||
})
|
||||
} else {
|
||||
sendEvent({
|
||||
type: 'execution:completed',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
|
||||
|
||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:error',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
error: executionResult?.error || errorMessage,
|
||||
duration: executionResult?.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (!isStreamClosed) {
|
||||
try {
|
||||
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
|
||||
controller.close()
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
},
|
||||
cancel() {
|
||||
isStreamClosed = true
|
||||
abortController.abort()
|
||||
markExecutionCancelled(executionId).catch(() => {})
|
||||
},
|
||||
})
|
||||
|
||||
return new NextResponse(stream, {
|
||||
headers: { ...SSE_HEADERS, 'X-Execution-Id': executionId },
|
||||
})
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
|
||||
return NextResponse.json(
|
||||
{ error: errorMessage || 'Failed to start execution' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -53,6 +53,7 @@ const ExecuteWorkflowSchema = z.object({
|
||||
parallels: z.record(z.any()).optional(),
|
||||
})
|
||||
.optional(),
|
||||
stopAfterBlockId: z.string().optional(),
|
||||
})
|
||||
|
||||
export const runtime = 'nodejs'
|
||||
@@ -222,6 +223,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
stopAfterBlockId,
|
||||
} = validation.data
|
||||
|
||||
// For API key and internal JWT auth, the entire body is the input (except for our control fields)
|
||||
@@ -237,6 +239,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
stopAfterBlockId: _stopAfterBlockId,
|
||||
workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
|
||||
...rest
|
||||
} = body
|
||||
@@ -434,6 +437,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
loggingSession,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
})
|
||||
|
||||
const outputWithBase64 = includeFileBase64
|
||||
@@ -722,6 +726,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
abortSignal: abortController.signal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
|
||||
@@ -573,19 +573,7 @@ const TraceSpanNode = memo(function TraceSpanNode({
|
||||
return children.sort((a, b) => parseTime(a.startTime) - parseTime(b.startTime))
|
||||
}, [span, spanId, spanStartTime])
|
||||
|
||||
// Hide empty model timing segments for agents without tool calls
|
||||
const filteredChildren = useMemo(() => {
|
||||
const isAgent = span.type?.toLowerCase() === 'agent'
|
||||
const hasToolCalls =
|
||||
(span.toolCalls?.length ?? 0) > 0 || allChildren.some((c) => c.type?.toLowerCase() === 'tool')
|
||||
|
||||
if (isAgent && !hasToolCalls) {
|
||||
return allChildren.filter((c) => c.type?.toLowerCase() !== 'model')
|
||||
}
|
||||
return allChildren
|
||||
}, [allChildren, span.type, span.toolCalls])
|
||||
|
||||
const hasChildren = filteredChildren.length > 0
|
||||
const hasChildren = allChildren.length > 0
|
||||
const isExpanded = isRootWorkflow || expandedNodes.has(spanId)
|
||||
const isToggleable = !isRootWorkflow
|
||||
|
||||
@@ -697,7 +685,7 @@ const TraceSpanNode = memo(function TraceSpanNode({
|
||||
{/* Nested Children */}
|
||||
{hasChildren && (
|
||||
<div className='flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[10px]'>
|
||||
{filteredChildren.map((child, index) => (
|
||||
{allChildren.map((child, index) => (
|
||||
<div key={child.id || `${spanId}-child-${index}`} className='pl-[6px]'>
|
||||
<TraceSpanNode
|
||||
span={child}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import { memo, useCallback } from 'react'
|
||||
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
|
||||
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut, Play } from 'lucide-react'
|
||||
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useExecutionStore } from '@/stores/execution'
|
||||
import { useNotificationStore } from '@/stores/notifications'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -49,6 +51,7 @@ export const ActionBar = memo(
|
||||
collaborativeBatchToggleBlockHandles,
|
||||
} = useCollaborativeWorkflow()
|
||||
const { setPendingSelection } = useWorkflowRegistry()
|
||||
const { handleRunFromBlock } = useWorkflowExecution()
|
||||
|
||||
const addNotification = useNotificationStore((s) => s.addNotification)
|
||||
|
||||
@@ -97,12 +100,39 @@ export const ActionBar = memo(
|
||||
)
|
||||
)
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry()
|
||||
const { isExecuting, getLastExecutionSnapshot } = useExecutionStore()
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const edges = useWorkflowStore((state) => state.edges)
|
||||
|
||||
const isStartBlock = isInputDefinitionTrigger(blockType)
|
||||
const isResponseBlock = blockType === 'response'
|
||||
const isNoteBlock = blockType === 'note'
|
||||
const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
|
||||
const isInsideSubflow = parentId && (parentType === 'loop' || parentType === 'parallel')
|
||||
|
||||
const snapshot = activeWorkflowId ? getLastExecutionSnapshot(activeWorkflowId) : null
|
||||
const incomingEdges = edges.filter((edge) => edge.target === blockId)
|
||||
const isTriggerBlock = incomingEdges.length === 0
|
||||
|
||||
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
|
||||
const isSourceSatisfied = (sourceId: string) => {
|
||||
if (snapshot?.executedBlocks.includes(sourceId)) return true
|
||||
// Check if source is a trigger (has no incoming edges itself)
|
||||
const sourceIncomingEdges = edges.filter((edge) => edge.target === sourceId)
|
||||
return sourceIncomingEdges.length === 0
|
||||
}
|
||||
|
||||
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
|
||||
const dependenciesSatisfied =
|
||||
isTriggerBlock || (snapshot && incomingEdges.every((edge) => isSourceSatisfied(edge.source)))
|
||||
const canRunFromBlock =
|
||||
dependenciesSatisfied && !isNoteBlock && !isInsideSubflow && !isExecuting
|
||||
|
||||
const handleRunFromBlockClick = useCallback(() => {
|
||||
if (!activeWorkflowId || !canRunFromBlock) return
|
||||
handleRunFromBlock(blockId, activeWorkflowId)
|
||||
}, [blockId, activeWorkflowId, canRunFromBlock, handleRunFromBlock])
|
||||
|
||||
/**
|
||||
* Get appropriate tooltip message based on disabled state
|
||||
@@ -128,30 +158,35 @@ export const ActionBar = memo(
|
||||
'dark:border-transparent dark:bg-[var(--surface-4)]'
|
||||
)}
|
||||
>
|
||||
{!isNoteBlock && (
|
||||
{!isNoteBlock && !isInsideSubflow && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
if (!disabled) {
|
||||
collaborativeBatchToggleBlockEnabled([blockId])
|
||||
if (canRunFromBlock && !disabled) {
|
||||
handleRunFromBlockClick()
|
||||
}
|
||||
}}
|
||||
className={ACTION_BUTTON_STYLES}
|
||||
disabled={disabled}
|
||||
disabled={disabled || !canRunFromBlock}
|
||||
>
|
||||
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
||||
<Play className={ICON_SIZE} />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||
{(() => {
|
||||
if (disabled) return getTooltipMessage('Run from block')
|
||||
if (isExecuting) return 'Execution in progress'
|
||||
if (!dependenciesSatisfied) return 'Run upstream blocks first'
|
||||
return 'Run from block'
|
||||
})()}
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
|
||||
{isSubflowBlock && (
|
||||
{!isNoteBlock && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
|
||||
@@ -40,9 +40,16 @@ export interface BlockMenuProps {
|
||||
onRemoveFromSubflow: () => void
|
||||
onOpenEditor: () => void
|
||||
onRename: () => void
|
||||
onRunFromBlock?: () => void
|
||||
onRunUntilBlock?: () => void
|
||||
hasClipboard?: boolean
|
||||
showRemoveFromSubflow?: boolean
|
||||
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
|
||||
canRunFromBlock?: boolean
|
||||
disableEdit?: boolean
|
||||
isExecuting?: boolean
|
||||
/** Whether the selected block is a trigger (has no incoming edges) */
|
||||
isPositionalTrigger?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -65,9 +72,14 @@ export function BlockMenu({
|
||||
onRemoveFromSubflow,
|
||||
onOpenEditor,
|
||||
onRename,
|
||||
onRunFromBlock,
|
||||
onRunUntilBlock,
|
||||
hasClipboard = false,
|
||||
showRemoveFromSubflow = false,
|
||||
canRunFromBlock = false,
|
||||
disableEdit = false,
|
||||
isExecuting = false,
|
||||
isPositionalTrigger = false,
|
||||
}: BlockMenuProps) {
|
||||
const isSingleBlock = selectedBlocks.length === 1
|
||||
|
||||
@@ -78,10 +90,15 @@ export function BlockMenu({
|
||||
(b) =>
|
||||
TriggerUtils.requiresSingleInstance(b.type) || TriggerUtils.isSingleInstanceBlockType(b.type)
|
||||
)
|
||||
const hasTriggerBlock = selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b))
|
||||
// A block is a trigger if it's explicitly a trigger type OR has no incoming edges (positional trigger)
|
||||
const hasTriggerBlock =
|
||||
selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b)) || isPositionalTrigger
|
||||
const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
|
||||
const isSubflow =
|
||||
isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')
|
||||
const isInsideSubflow =
|
||||
isSingleBlock &&
|
||||
(selectedBlocks[0]?.parentType === 'loop' || selectedBlocks[0]?.parentType === 'parallel')
|
||||
|
||||
const canRemoveFromSubflow = showRemoveFromSubflow && !hasTriggerBlock
|
||||
|
||||
@@ -203,6 +220,38 @@ export function BlockMenu({
|
||||
</PopoverItem>
|
||||
)}
|
||||
|
||||
{/* Run from/until block - only for single non-note block, not inside subflows */}
|
||||
{isSingleBlock && !allNoteBlocks && !isInsideSubflow && (
|
||||
<>
|
||||
<PopoverDivider />
|
||||
<PopoverItem
|
||||
disabled={!canRunFromBlock || isExecuting}
|
||||
onClick={() => {
|
||||
if (canRunFromBlock && !isExecuting) {
|
||||
onRunFromBlock?.()
|
||||
onClose()
|
||||
}
|
||||
}}
|
||||
>
|
||||
Run from block
|
||||
</PopoverItem>
|
||||
{/* Hide "Run until" for triggers - they're always at the start */}
|
||||
{!hasTriggerBlock && (
|
||||
<PopoverItem
|
||||
disabled={isExecuting}
|
||||
onClick={() => {
|
||||
if (!isExecuting) {
|
||||
onRunUntilBlock?.()
|
||||
onClose()
|
||||
}
|
||||
}}
|
||||
>
|
||||
Run until block
|
||||
</PopoverItem>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Destructive action */}
|
||||
<PopoverDivider />
|
||||
<PopoverItem
|
||||
|
||||
@@ -3,9 +3,8 @@
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import clsx from 'clsx'
|
||||
import { RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { ChevronDown, RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { useShallow } from 'zustand/react/shallow'
|
||||
import { ChevronDown } from '@/components/emcn'
|
||||
import {
|
||||
FieldItem,
|
||||
type SchemaField,
|
||||
@@ -116,8 +115,9 @@ function ConnectionItem({
|
||||
{hasFields && (
|
||||
<ChevronDown
|
||||
className={clsx(
|
||||
'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
|
||||
!isExpanded && '-rotate-90'
|
||||
'h-3.5 w-3.5 flex-shrink-0 transition-transform duration-100',
|
||||
'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]',
|
||||
isExpanded && 'rotate-180'
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { memo } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { Filter } from 'lucide-react'
|
||||
import {
|
||||
Button,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
PopoverScrollArea,
|
||||
PopoverSection,
|
||||
PopoverTrigger,
|
||||
} from '@/components/emcn'
|
||||
import type {
|
||||
BlockInfo,
|
||||
TerminalFilters,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
import { getBlockIcon } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils'
|
||||
|
||||
/**
|
||||
* Props for the FilterPopover component
|
||||
*/
|
||||
export interface FilterPopoverProps {
|
||||
open: boolean
|
||||
onOpenChange: (open: boolean) => void
|
||||
filters: TerminalFilters
|
||||
toggleStatus: (status: 'error' | 'info') => void
|
||||
toggleBlock: (blockId: string) => void
|
||||
uniqueBlocks: BlockInfo[]
|
||||
hasActiveFilters: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter popover component used in terminal header and output panel
|
||||
*/
|
||||
export const FilterPopover = memo(function FilterPopover({
|
||||
open,
|
||||
onOpenChange,
|
||||
filters,
|
||||
toggleStatus,
|
||||
toggleBlock,
|
||||
uniqueBlocks,
|
||||
hasActiveFilters,
|
||||
}: FilterPopoverProps) {
|
||||
return (
|
||||
<Popover open={open} onOpenChange={onOpenChange} size='sm'>
|
||||
<PopoverTrigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='!p-1.5 -m-1.5'
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
aria-label='Filters'
|
||||
>
|
||||
<Filter
|
||||
className={clsx('h-3 w-3', hasActiveFilters && 'text-[var(--brand-secondary)]')}
|
||||
/>
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent
|
||||
side='top'
|
||||
align='end'
|
||||
sideOffset={4}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
minWidth={160}
|
||||
maxWidth={220}
|
||||
maxHeight={300}
|
||||
>
|
||||
<PopoverSection>Status</PopoverSection>
|
||||
<PopoverItem
|
||||
active={filters.statuses.has('error')}
|
||||
showCheck={filters.statuses.has('error')}
|
||||
onClick={() => toggleStatus('error')}
|
||||
>
|
||||
<div
|
||||
className='h-[6px] w-[6px] rounded-[2px]'
|
||||
style={{ backgroundColor: 'var(--text-error)' }}
|
||||
/>
|
||||
<span className='flex-1'>Error</span>
|
||||
</PopoverItem>
|
||||
<PopoverItem
|
||||
active={filters.statuses.has('info')}
|
||||
showCheck={filters.statuses.has('info')}
|
||||
onClick={() => toggleStatus('info')}
|
||||
>
|
||||
<div
|
||||
className='h-[6px] w-[6px] rounded-[2px]'
|
||||
style={{ backgroundColor: 'var(--terminal-status-info-color)' }}
|
||||
/>
|
||||
<span className='flex-1'>Info</span>
|
||||
</PopoverItem>
|
||||
|
||||
{uniqueBlocks.length > 0 && (
|
||||
<>
|
||||
<PopoverDivider className='my-[4px]' />
|
||||
<PopoverSection className='!mt-0'>Blocks</PopoverSection>
|
||||
<PopoverScrollArea className='max-h-[100px]'>
|
||||
{uniqueBlocks.map((block) => {
|
||||
const BlockIcon = getBlockIcon(block.blockType)
|
||||
const isSelected = filters.blockIds.has(block.blockId)
|
||||
|
||||
return (
|
||||
<PopoverItem
|
||||
key={block.blockId}
|
||||
active={isSelected}
|
||||
showCheck={isSelected}
|
||||
onClick={() => toggleBlock(block.blockId)}
|
||||
>
|
||||
{BlockIcon && <BlockIcon className='h-3 w-3' />}
|
||||
<span className='flex-1'>{block.blockName}</span>
|
||||
</PopoverItem>
|
||||
)
|
||||
})}
|
||||
</PopoverScrollArea>
|
||||
</>
|
||||
)}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
})
|
||||
@@ -1 +0,0 @@
|
||||
export { FilterPopover, type FilterPopoverProps } from './filter-popover'
|
||||
@@ -1,5 +1,2 @@
|
||||
export { FilterPopover, type FilterPopoverProps } from './filter-popover'
|
||||
export { LogRowContextMenu, type LogRowContextMenuProps } from './log-row-context-menu'
|
||||
export { OutputPanel, type OutputPanelProps } from './output-panel'
|
||||
export { RunningBadge, StatusDisplay, type StatusDisplayProps } from './status-display'
|
||||
export { ToggleButton, type ToggleButtonProps } from './toggle-button'
|
||||
export { LogRowContextMenu } from './log-row-context-menu'
|
||||
export { OutputContextMenu } from './output-context-menu'
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { memo, type RefObject } from 'react'
|
||||
import type { RefObject } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
@@ -8,13 +8,20 @@ import {
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import type {
|
||||
ContextMenuPosition,
|
||||
TerminalFilters,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
import type { ConsoleEntry } from '@/stores/terminal'
|
||||
|
||||
export interface LogRowContextMenuProps {
|
||||
interface ContextMenuPosition {
|
||||
x: number
|
||||
y: number
|
||||
}
|
||||
|
||||
interface TerminalFilters {
|
||||
blockIds: Set<string>
|
||||
statuses: Set<'error' | 'info'>
|
||||
runIds: Set<string>
|
||||
}
|
||||
|
||||
interface LogRowContextMenuProps {
|
||||
isOpen: boolean
|
||||
position: ContextMenuPosition
|
||||
menuRef: RefObject<HTMLDivElement | null>
|
||||
@@ -23,16 +30,19 @@ export interface LogRowContextMenuProps {
|
||||
filters: TerminalFilters
|
||||
onFilterByBlock: (blockId: string) => void
|
||||
onFilterByStatus: (status: 'error' | 'info') => void
|
||||
onFilterByRunId: (runId: string) => void
|
||||
onCopyRunId: (runId: string) => void
|
||||
onClearFilters: () => void
|
||||
onClearConsole: () => void
|
||||
onFixInCopilot: (entry: ConsoleEntry) => void
|
||||
hasActiveFilters: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Context menu for terminal log rows (left side).
|
||||
* Displays filtering options based on the selected row's properties.
|
||||
*/
|
||||
export const LogRowContextMenu = memo(function LogRowContextMenu({
|
||||
export function LogRowContextMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
@@ -41,15 +51,19 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
|
||||
filters,
|
||||
onFilterByBlock,
|
||||
onFilterByStatus,
|
||||
onFilterByRunId,
|
||||
onCopyRunId,
|
||||
onClearFilters,
|
||||
onClearConsole,
|
||||
onFixInCopilot,
|
||||
hasActiveFilters,
|
||||
}: LogRowContextMenuProps) {
|
||||
const hasRunId = entry?.executionId != null
|
||||
|
||||
const isBlockFiltered = entry ? filters.blockIds.has(entry.blockId) : false
|
||||
const entryStatus = entry?.success ? 'info' : 'error'
|
||||
const isStatusFiltered = entry ? filters.statuses.has(entryStatus) : false
|
||||
const isRunIdFiltered = entry?.executionId ? filters.runIds.has(entry.executionId) : false
|
||||
|
||||
return (
|
||||
<Popover
|
||||
@@ -120,11 +134,34 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
|
||||
>
|
||||
Filter by Status
|
||||
</PopoverItem>
|
||||
{hasRunId && (
|
||||
<PopoverItem
|
||||
showCheck={isRunIdFiltered}
|
||||
onClick={() => {
|
||||
onFilterByRunId(entry.executionId!)
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Filter by Run ID
|
||||
</PopoverItem>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Clear filters */}
|
||||
{hasActiveFilters && (
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onClearFilters()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Clear All Filters
|
||||
</PopoverItem>
|
||||
)}
|
||||
|
||||
{/* Destructive action */}
|
||||
{entry && <PopoverDivider />}
|
||||
{(entry || hasActiveFilters) && <PopoverDivider />}
|
||||
<PopoverItem
|
||||
onClick={() => {
|
||||
onClearConsole()
|
||||
@@ -136,4 +173,4 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export { LogRowContextMenu, type LogRowContextMenuProps } from './log-row-context-menu'
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { memo, type RefObject } from 'react'
|
||||
import type { RefObject } from 'react'
|
||||
import {
|
||||
Popover,
|
||||
PopoverAnchor,
|
||||
@@ -8,9 +8,13 @@ import {
|
||||
PopoverDivider,
|
||||
PopoverItem,
|
||||
} from '@/components/emcn'
|
||||
import type { ContextMenuPosition } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
|
||||
export interface OutputContextMenuProps {
|
||||
interface ContextMenuPosition {
|
||||
x: number
|
||||
y: number
|
||||
}
|
||||
|
||||
interface OutputContextMenuProps {
|
||||
isOpen: boolean
|
||||
position: ContextMenuPosition
|
||||
menuRef: RefObject<HTMLDivElement | null>
|
||||
@@ -18,8 +22,6 @@ export interface OutputContextMenuProps {
|
||||
onCopySelection: () => void
|
||||
onCopyAll: () => void
|
||||
onSearch: () => void
|
||||
structuredView: boolean
|
||||
onToggleStructuredView: () => void
|
||||
wrapText: boolean
|
||||
onToggleWrap: () => void
|
||||
openOnRun: boolean
|
||||
@@ -32,7 +34,7 @@ export interface OutputContextMenuProps {
|
||||
* Context menu for terminal output panel (right side).
|
||||
* Displays copy, search, and display options for the code viewer.
|
||||
*/
|
||||
export const OutputContextMenu = memo(function OutputContextMenu({
|
||||
export function OutputContextMenu({
|
||||
isOpen,
|
||||
position,
|
||||
menuRef,
|
||||
@@ -40,8 +42,6 @@ export const OutputContextMenu = memo(function OutputContextMenu({
|
||||
onCopySelection,
|
||||
onCopyAll,
|
||||
onSearch,
|
||||
structuredView,
|
||||
onToggleStructuredView,
|
||||
wrapText,
|
||||
onToggleWrap,
|
||||
openOnRun,
|
||||
@@ -96,9 +96,6 @@ export const OutputContextMenu = memo(function OutputContextMenu({
|
||||
|
||||
{/* Display settings - toggles don't close menu */}
|
||||
<PopoverDivider />
|
||||
<PopoverItem showCheck={structuredView} onClick={onToggleStructuredView}>
|
||||
Structured View
|
||||
</PopoverItem>
|
||||
<PopoverItem showCheck={wrapText} onClick={onToggleWrap}>
|
||||
Wrap Text
|
||||
</PopoverItem>
|
||||
@@ -119,4 +116,4 @@ export const OutputContextMenu = memo(function OutputContextMenu({
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -1,913 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import type React from 'react'
|
||||
import {
|
||||
createContext,
|
||||
memo,
|
||||
useCallback,
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { List, type RowComponentProps, useListRef } from 'react-window'
|
||||
import { Badge, ChevronDown } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
|
||||
type ValueType = 'null' | 'undefined' | 'array' | 'string' | 'number' | 'boolean' | 'object'
|
||||
type BadgeVariant = 'green' | 'blue' | 'orange' | 'purple' | 'gray' | 'red'
|
||||
|
||||
interface NodeEntry {
|
||||
key: string
|
||||
value: unknown
|
||||
path: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Search context for structured output tree.
|
||||
*/
|
||||
interface SearchContextValue {
|
||||
query: string
|
||||
pathToMatchIndices: Map<string, number[]>
|
||||
}
|
||||
|
||||
const SearchContext = createContext<SearchContextValue | null>(null)
|
||||
|
||||
/**
|
||||
* Configuration for virtualized rendering.
|
||||
*/
|
||||
const CONFIG = {
|
||||
ROW_HEIGHT: 22,
|
||||
INDENT_PER_LEVEL: 12,
|
||||
BASE_PADDING: 20,
|
||||
MAX_SEARCH_DEPTH: 100,
|
||||
OVERSCAN_COUNT: 10,
|
||||
VIRTUALIZATION_THRESHOLD: 200,
|
||||
} as const
|
||||
|
||||
const BADGE_VARIANTS: Record<ValueType, BadgeVariant> = {
|
||||
string: 'green',
|
||||
number: 'blue',
|
||||
boolean: 'orange',
|
||||
array: 'purple',
|
||||
null: 'gray',
|
||||
undefined: 'gray',
|
||||
object: 'gray',
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Styling constants matching the original non-virtualized implementation.
|
||||
*/
|
||||
const STYLES = {
|
||||
row: 'group flex min-h-[22px] cursor-pointer items-center gap-[6px] rounded-[8px] px-[6px] -mx-[6px] hover:bg-[var(--surface-6)] dark:hover:bg-[var(--surface-5)]',
|
||||
chevron:
|
||||
'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
|
||||
keyName:
|
||||
'font-medium text-[13px] text-[var(--text-primary)] group-hover:text-[var(--text-primary)]',
|
||||
badge: 'rounded-[4px] px-[4px] py-[0px] text-[11px]',
|
||||
summary: 'text-[12px] text-[var(--text-tertiary)]',
|
||||
indent:
|
||||
'mt-[2px] ml-[3px] flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[9px]',
|
||||
value: 'min-w-0 py-[2px] text-[13px] text-[var(--text-primary)]',
|
||||
emptyValue: 'py-[2px] text-[13px] text-[var(--text-tertiary)]',
|
||||
matchHighlight: 'bg-yellow-200/60 dark:bg-yellow-500/40',
|
||||
currentMatchHighlight: 'bg-orange-400',
|
||||
} as const
|
||||
|
||||
const EMPTY_MATCH_INDICES: number[] = []
|
||||
|
||||
function getTypeLabel(value: unknown): ValueType {
|
||||
if (value === null) return 'null'
|
||||
if (value === undefined) return 'undefined'
|
||||
if (Array.isArray(value)) return 'array'
|
||||
return typeof value as ValueType
|
||||
}
|
||||
|
||||
function formatPrimitive(value: unknown): string {
|
||||
if (value === null) return 'null'
|
||||
if (value === undefined) return 'undefined'
|
||||
return String(value)
|
||||
}
|
||||
|
||||
function isPrimitive(value: unknown): value is null | undefined | string | number | boolean {
|
||||
return value === null || value === undefined || typeof value !== 'object'
|
||||
}
|
||||
|
||||
function isEmpty(value: unknown): boolean {
|
||||
if (Array.isArray(value)) return value.length === 0
|
||||
if (typeof value === 'object' && value !== null) return Object.keys(value).length === 0
|
||||
return false
|
||||
}
|
||||
|
||||
function extractErrorMessage(data: unknown): string {
|
||||
if (typeof data === 'string') return data
|
||||
if (data instanceof Error) return data.message
|
||||
if (typeof data === 'object' && data !== null && 'message' in data) {
|
||||
return String((data as { message: unknown }).message)
|
||||
}
|
||||
return JSON.stringify(data, null, 2)
|
||||
}
|
||||
|
||||
function buildEntries(value: unknown, basePath: string): NodeEntry[] {
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item, i) => ({ key: String(i), value: item, path: `${basePath}[${i}]` }))
|
||||
}
|
||||
return Object.entries(value as Record<string, unknown>).map(([k, v]) => ({
|
||||
key: k,
|
||||
value: v,
|
||||
path: `${basePath}.${k}`,
|
||||
}))
|
||||
}
|
||||
|
||||
function getCollapsedSummary(value: unknown): string | null {
|
||||
if (Array.isArray(value)) {
|
||||
const len = value.length
|
||||
return `${len} item${len !== 1 ? 's' : ''}`
|
||||
}
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
const count = Object.keys(value).length
|
||||
return `${count} key${count !== 1 ? 's' : ''}`
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
function computeInitialPaths(data: unknown, isError: boolean): Set<string> {
|
||||
if (isError) return new Set(['root.error'])
|
||||
if (!data || typeof data !== 'object') return new Set()
|
||||
const entries = Array.isArray(data)
|
||||
? data.map((_, i) => `root[${i}]`)
|
||||
: Object.keys(data).map((k) => `root.${k}`)
|
||||
return new Set(entries)
|
||||
}
|
||||
|
||||
function getAncestorPaths(path: string): string[] {
|
||||
const ancestors: string[] = []
|
||||
let current = path
|
||||
|
||||
while (current.includes('.') || current.includes('[')) {
|
||||
const splitPoint = Math.max(current.lastIndexOf('.'), current.lastIndexOf('['))
|
||||
if (splitPoint <= 0) break
|
||||
current = current.slice(0, splitPoint)
|
||||
if (current !== 'root') ancestors.push(current)
|
||||
}
|
||||
|
||||
return ancestors
|
||||
}
|
||||
|
||||
function findTextMatches(text: string, query: string): Array<[number, number]> {
|
||||
if (!query) return []
|
||||
|
||||
const matches: Array<[number, number]> = []
|
||||
const lowerText = text.toLowerCase()
|
||||
const lowerQuery = query.toLowerCase()
|
||||
let pos = 0
|
||||
|
||||
while (pos < lowerText.length) {
|
||||
const idx = lowerText.indexOf(lowerQuery, pos)
|
||||
if (idx === -1) break
|
||||
matches.push([idx, idx + query.length])
|
||||
pos = idx + 1
|
||||
}
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
function addPrimitiveMatches(value: unknown, path: string, query: string, matches: string[]): void {
|
||||
const text = formatPrimitive(value)
|
||||
const count = findTextMatches(text, query).length
|
||||
for (let i = 0; i < count; i++) {
|
||||
matches.push(path)
|
||||
}
|
||||
}
|
||||
|
||||
function collectAllMatchPaths(data: unknown, query: string, basePath: string, depth = 0): string[] {
|
||||
if (!query || depth > CONFIG.MAX_SEARCH_DEPTH) return []
|
||||
|
||||
const matches: string[] = []
|
||||
|
||||
if (isPrimitive(data)) {
|
||||
addPrimitiveMatches(data, `${basePath}.value`, query, matches)
|
||||
return matches
|
||||
}
|
||||
|
||||
for (const entry of buildEntries(data, basePath)) {
|
||||
if (isPrimitive(entry.value)) {
|
||||
addPrimitiveMatches(entry.value, entry.path, query, matches)
|
||||
} else {
|
||||
matches.push(...collectAllMatchPaths(entry.value, query, entry.path, depth + 1))
|
||||
}
|
||||
}
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
function buildPathToIndicesMap(matchPaths: string[]): Map<string, number[]> {
|
||||
const map = new Map<string, number[]>()
|
||||
matchPaths.forEach((path, globalIndex) => {
|
||||
const existing = map.get(path)
|
||||
if (existing) {
|
||||
existing.push(globalIndex)
|
||||
} else {
|
||||
map.set(path, [globalIndex])
|
||||
}
|
||||
})
|
||||
return map
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders text with search highlights using segments.
|
||||
*/
|
||||
function renderHighlightedSegments(
|
||||
text: string,
|
||||
query: string,
|
||||
matchIndices: number[],
|
||||
currentMatchIndex: number,
|
||||
path: string
|
||||
): React.ReactNode {
|
||||
if (!query || matchIndices.length === 0) return text
|
||||
|
||||
const textMatches = findTextMatches(text, query)
|
||||
if (textMatches.length === 0) return text
|
||||
|
||||
const segments: React.ReactNode[] = []
|
||||
let lastEnd = 0
|
||||
|
||||
textMatches.forEach(([start, end], i) => {
|
||||
const globalIndex = matchIndices[i]
|
||||
const isCurrent = globalIndex === currentMatchIndex
|
||||
|
||||
if (start > lastEnd) {
|
||||
segments.push(<span key={`t-${path}-${start}`}>{text.slice(lastEnd, start)}</span>)
|
||||
}
|
||||
|
||||
segments.push(
|
||||
<mark
|
||||
key={`m-${path}-${start}`}
|
||||
data-search-match
|
||||
data-match-index={globalIndex}
|
||||
className={cn(
|
||||
'rounded-sm',
|
||||
isCurrent ? STYLES.currentMatchHighlight : STYLES.matchHighlight
|
||||
)}
|
||||
>
|
||||
{text.slice(start, end)}
|
||||
</mark>
|
||||
)
|
||||
lastEnd = end
|
||||
})
|
||||
|
||||
if (lastEnd < text.length) {
|
||||
segments.push(<span key={`t-${path}-${lastEnd}`}>{text.slice(lastEnd)}</span>)
|
||||
}
|
||||
|
||||
return <>{segments}</>
|
||||
}
|
||||
|
||||
interface HighlightedTextProps {
|
||||
text: string
|
||||
matchIndices: number[]
|
||||
path: string
|
||||
currentMatchIndex: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders text with search highlights for non-virtualized mode.
|
||||
* Accepts currentMatchIndex as prop to ensure re-render when it changes.
|
||||
*/
|
||||
const HighlightedText = memo(function HighlightedText({
|
||||
text,
|
||||
matchIndices,
|
||||
path,
|
||||
currentMatchIndex,
|
||||
}: HighlightedTextProps) {
|
||||
const searchContext = useContext(SearchContext)
|
||||
|
||||
if (!searchContext || matchIndices.length === 0) return <>{text}</>
|
||||
|
||||
return (
|
||||
<>
|
||||
{renderHighlightedSegments(text, searchContext.query, matchIndices, currentMatchIndex, path)}
|
||||
</>
|
||||
)
|
||||
})
|
||||
|
||||
interface StructuredNodeProps {
|
||||
name: string
|
||||
value: unknown
|
||||
path: string
|
||||
expandedPaths: Set<string>
|
||||
onToggle: (path: string) => void
|
||||
wrapText: boolean
|
||||
currentMatchIndex: number
|
||||
isError?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursive node component for non-virtualized rendering.
|
||||
* Preserves exact original styling with border-left tree lines.
|
||||
*/
|
||||
const StructuredNode = memo(function StructuredNode({
|
||||
name,
|
||||
value,
|
||||
path,
|
||||
expandedPaths,
|
||||
onToggle,
|
||||
wrapText,
|
||||
currentMatchIndex,
|
||||
isError = false,
|
||||
}: StructuredNodeProps) {
|
||||
const searchContext = useContext(SearchContext)
|
||||
const type = getTypeLabel(value)
|
||||
const isPrimitiveValue = isPrimitive(value)
|
||||
const isEmptyValue = !isPrimitiveValue && isEmpty(value)
|
||||
const isExpanded = expandedPaths.has(path)
|
||||
|
||||
const handleToggle = useCallback(() => onToggle(path), [onToggle, path])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
handleToggle()
|
||||
}
|
||||
},
|
||||
[handleToggle]
|
||||
)
|
||||
|
||||
const childEntries = useMemo(
|
||||
() => (isPrimitiveValue || isEmptyValue ? [] : buildEntries(value, path)),
|
||||
[value, isPrimitiveValue, isEmptyValue, path]
|
||||
)
|
||||
|
||||
const collapsedSummary = useMemo(
|
||||
() => (isPrimitiveValue ? null : getCollapsedSummary(value)),
|
||||
[value, isPrimitiveValue]
|
||||
)
|
||||
|
||||
const badgeVariant = isError ? 'red' : BADGE_VARIANTS[type]
|
||||
const valueText = isPrimitiveValue ? formatPrimitive(value) : ''
|
||||
const matchIndices = searchContext?.pathToMatchIndices.get(path) ?? EMPTY_MATCH_INDICES
|
||||
|
||||
return (
|
||||
<div className='flex min-w-0 flex-col'>
|
||||
<div
|
||||
className={STYLES.row}
|
||||
onClick={handleToggle}
|
||||
onKeyDown={handleKeyDown}
|
||||
role='button'
|
||||
tabIndex={0}
|
||||
aria-expanded={isExpanded}
|
||||
>
|
||||
<span className={cn(STYLES.keyName, isError && 'text-[var(--text-error)]')}>{name}</span>
|
||||
<Badge variant={badgeVariant} className={STYLES.badge}>
|
||||
{type}
|
||||
</Badge>
|
||||
{!isExpanded && collapsedSummary && (
|
||||
<span className={STYLES.summary}>{collapsedSummary}</span>
|
||||
)}
|
||||
<ChevronDown className={cn(STYLES.chevron, !isExpanded && '-rotate-90')} />
|
||||
</div>
|
||||
|
||||
{isExpanded && (
|
||||
<div className={STYLES.indent}>
|
||||
{isPrimitiveValue ? (
|
||||
<div
|
||||
className={cn(
|
||||
STYLES.value,
|
||||
wrapText ? '[word-break:break-word]' : 'whitespace-nowrap'
|
||||
)}
|
||||
>
|
||||
<HighlightedText
|
||||
text={valueText}
|
||||
matchIndices={matchIndices}
|
||||
path={path}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
/>
|
||||
</div>
|
||||
) : isEmptyValue ? (
|
||||
<div className={STYLES.emptyValue}>{Array.isArray(value) ? '[]' : '{}'}</div>
|
||||
) : (
|
||||
childEntries.map((entry) => (
|
||||
<StructuredNode
|
||||
key={entry.path}
|
||||
name={entry.key}
|
||||
value={entry.value}
|
||||
path={entry.path}
|
||||
expandedPaths={expandedPaths}
|
||||
onToggle={onToggle}
|
||||
wrapText={wrapText}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
/>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})
|
||||
|
||||
/**
|
||||
* Flattened row for virtualization.
|
||||
*/
|
||||
interface FlatRow {
|
||||
path: string
|
||||
key: string
|
||||
value: unknown
|
||||
depth: number
|
||||
type: 'header' | 'value' | 'empty'
|
||||
valueType: ValueType
|
||||
isExpanded: boolean
|
||||
isError: boolean
|
||||
collapsedSummary: string | null
|
||||
displayText: string
|
||||
matchIndices: number[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens the tree into rows for virtualization.
|
||||
*/
|
||||
function flattenTree(
|
||||
data: unknown,
|
||||
expandedPaths: Set<string>,
|
||||
pathToMatchIndices: Map<string, number[]>,
|
||||
isError: boolean
|
||||
): FlatRow[] {
|
||||
const rows: FlatRow[] = []
|
||||
|
||||
if (isError) {
|
||||
const errorText = extractErrorMessage(data)
|
||||
const isExpanded = expandedPaths.has('root.error')
|
||||
|
||||
rows.push({
|
||||
path: 'root.error',
|
||||
key: 'error',
|
||||
value: errorText,
|
||||
depth: 0,
|
||||
type: 'header',
|
||||
valueType: 'string',
|
||||
isExpanded,
|
||||
isError: true,
|
||||
collapsedSummary: null,
|
||||
displayText: '',
|
||||
matchIndices: [],
|
||||
})
|
||||
|
||||
if (isExpanded) {
|
||||
rows.push({
|
||||
path: 'root.error.value',
|
||||
key: '',
|
||||
value: errorText,
|
||||
depth: 1,
|
||||
type: 'value',
|
||||
valueType: 'string',
|
||||
isExpanded: false,
|
||||
isError: true,
|
||||
collapsedSummary: null,
|
||||
displayText: errorText,
|
||||
matchIndices: pathToMatchIndices.get('root.error') ?? [],
|
||||
})
|
||||
}
|
||||
|
||||
return rows
|
||||
}
|
||||
|
||||
function processNode(key: string, value: unknown, path: string, depth: number): void {
|
||||
const valueType = getTypeLabel(value)
|
||||
const isPrimitiveValue = isPrimitive(value)
|
||||
const isEmptyValue = !isPrimitiveValue && isEmpty(value)
|
||||
const isExpanded = expandedPaths.has(path)
|
||||
const collapsedSummary = isPrimitiveValue ? null : getCollapsedSummary(value)
|
||||
|
||||
rows.push({
|
||||
path,
|
||||
key,
|
||||
value,
|
||||
depth,
|
||||
type: 'header',
|
||||
valueType,
|
||||
isExpanded,
|
||||
isError: false,
|
||||
collapsedSummary,
|
||||
displayText: '',
|
||||
matchIndices: [],
|
||||
})
|
||||
|
||||
if (isExpanded) {
|
||||
if (isPrimitiveValue) {
|
||||
rows.push({
|
||||
path: `${path}.value`,
|
||||
key: '',
|
||||
value,
|
||||
depth: depth + 1,
|
||||
type: 'value',
|
||||
valueType,
|
||||
isExpanded: false,
|
||||
isError: false,
|
||||
collapsedSummary: null,
|
||||
displayText: formatPrimitive(value),
|
||||
matchIndices: pathToMatchIndices.get(path) ?? [],
|
||||
})
|
||||
} else if (isEmptyValue) {
|
||||
rows.push({
|
||||
path: `${path}.empty`,
|
||||
key: '',
|
||||
value,
|
||||
depth: depth + 1,
|
||||
type: 'empty',
|
||||
valueType,
|
||||
isExpanded: false,
|
||||
isError: false,
|
||||
collapsedSummary: null,
|
||||
displayText: Array.isArray(value) ? '[]' : '{}',
|
||||
matchIndices: [],
|
||||
})
|
||||
} else {
|
||||
for (const entry of buildEntries(value, path)) {
|
||||
processNode(entry.key, entry.value, entry.path, depth + 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isPrimitive(data)) {
|
||||
processNode('value', data, 'root.value', 0)
|
||||
} else if (data && typeof data === 'object') {
|
||||
for (const entry of buildEntries(data, 'root')) {
|
||||
processNode(entry.key, entry.value, entry.path, 0)
|
||||
}
|
||||
}
|
||||
|
||||
return rows
|
||||
}
|
||||
|
||||
/**
|
||||
* Counts total visible rows for determining virtualization threshold.
|
||||
*/
|
||||
function countVisibleRows(data: unknown, expandedPaths: Set<string>, isError: boolean): number {
|
||||
if (isError) return expandedPaths.has('root.error') ? 2 : 1
|
||||
|
||||
let count = 0
|
||||
|
||||
function countNode(value: unknown, path: string): void {
|
||||
count++
|
||||
if (!expandedPaths.has(path)) return
|
||||
|
||||
if (isPrimitive(value) || isEmpty(value)) {
|
||||
count++
|
||||
} else {
|
||||
for (const entry of buildEntries(value, path)) {
|
||||
countNode(entry.value, entry.path)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (isPrimitive(data)) {
|
||||
countNode(data, 'root.value')
|
||||
} else if (data && typeof data === 'object') {
|
||||
for (const entry of buildEntries(data, 'root')) {
|
||||
countNode(entry.value, entry.path)
|
||||
}
|
||||
}
|
||||
|
||||
return count
|
||||
}
|
||||
|
||||
interface VirtualizedRowProps {
|
||||
rows: FlatRow[]
|
||||
onToggle: (path: string) => void
|
||||
wrapText: boolean
|
||||
searchQuery: string
|
||||
currentMatchIndex: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Virtualized row component for large data sets.
|
||||
*/
|
||||
function VirtualizedRow({ index, style, ...props }: RowComponentProps<VirtualizedRowProps>) {
|
||||
const { rows, onToggle, wrapText, searchQuery, currentMatchIndex } = props
|
||||
const row = rows[index]
|
||||
const paddingLeft = CONFIG.BASE_PADDING + row.depth * CONFIG.INDENT_PER_LEVEL
|
||||
|
||||
if (row.type === 'header') {
|
||||
const badgeVariant = row.isError ? 'red' : BADGE_VARIANTS[row.valueType]
|
||||
|
||||
return (
|
||||
<div style={{ ...style, paddingLeft }} data-row-index={index}>
|
||||
<div
|
||||
className={STYLES.row}
|
||||
onClick={() => onToggle(row.path)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
e.preventDefault()
|
||||
onToggle(row.path)
|
||||
}
|
||||
}}
|
||||
role='button'
|
||||
tabIndex={0}
|
||||
aria-expanded={row.isExpanded}
|
||||
>
|
||||
<span className={cn(STYLES.keyName, row.isError && 'text-[var(--text-error)]')}>
|
||||
{row.key}
|
||||
</span>
|
||||
<Badge variant={badgeVariant} className={STYLES.badge}>
|
||||
{row.valueType}
|
||||
</Badge>
|
||||
{!row.isExpanded && row.collapsedSummary && (
|
||||
<span className={STYLES.summary}>{row.collapsedSummary}</span>
|
||||
)}
|
||||
<ChevronDown className={cn(STYLES.chevron, !row.isExpanded && '-rotate-90')} />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
if (row.type === 'empty') {
|
||||
return (
|
||||
<div style={{ ...style, paddingLeft }} data-row-index={index}>
|
||||
<div className={STYLES.emptyValue}>{row.displayText}</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div style={{ ...style, paddingLeft }} data-row-index={index}>
|
||||
<div
|
||||
className={cn(
|
||||
STYLES.value,
|
||||
row.isError && 'text-[var(--text-error)]',
|
||||
wrapText ? '[word-break:break-word]' : 'whitespace-nowrap'
|
||||
)}
|
||||
>
|
||||
{renderHighlightedSegments(
|
||||
row.displayText,
|
||||
searchQuery,
|
||||
row.matchIndices,
|
||||
currentMatchIndex,
|
||||
row.path
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export interface StructuredOutputProps {
|
||||
data: unknown
|
||||
wrapText?: boolean
|
||||
isError?: boolean
|
||||
isRunning?: boolean
|
||||
className?: string
|
||||
searchQuery?: string
|
||||
currentMatchIndex?: number
|
||||
onMatchCountChange?: (count: number) => void
|
||||
contentRef?: React.RefObject<HTMLDivElement | null>
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders structured data as nested collapsible blocks.
|
||||
* Uses virtualization for large data sets (>200 visible rows) while
|
||||
* preserving exact original styling for smaller data sets.
|
||||
*/
|
||||
export const StructuredOutput = memo(function StructuredOutput({
|
||||
data,
|
||||
wrapText = true,
|
||||
isError = false,
|
||||
isRunning = false,
|
||||
className,
|
||||
searchQuery,
|
||||
currentMatchIndex = 0,
|
||||
onMatchCountChange,
|
||||
contentRef,
|
||||
}: StructuredOutputProps) {
|
||||
const [expandedPaths, setExpandedPaths] = useState<Set<string>>(() =>
|
||||
computeInitialPaths(data, isError)
|
||||
)
|
||||
const prevDataRef = useRef(data)
|
||||
const prevIsErrorRef = useRef(isError)
|
||||
const internalRef = useRef<HTMLDivElement>(null)
|
||||
const listRef = useListRef(null)
|
||||
const [containerHeight, setContainerHeight] = useState(400)
|
||||
|
||||
const setContainerRef = useCallback(
|
||||
(node: HTMLDivElement | null) => {
|
||||
;(internalRef as React.MutableRefObject<HTMLDivElement | null>).current = node
|
||||
if (contentRef) {
|
||||
;(contentRef as React.MutableRefObject<HTMLDivElement | null>).current = node
|
||||
}
|
||||
},
|
||||
[contentRef]
|
||||
)
|
||||
|
||||
// Measure container height
|
||||
useEffect(() => {
|
||||
const container = internalRef.current?.parentElement
|
||||
if (!container) return
|
||||
|
||||
const updateHeight = () => setContainerHeight(container.clientHeight)
|
||||
updateHeight()
|
||||
|
||||
const resizeObserver = new ResizeObserver(updateHeight)
|
||||
resizeObserver.observe(container)
|
||||
return () => resizeObserver.disconnect()
|
||||
}, [])
|
||||
|
||||
// Reset expanded paths when data changes
|
||||
useEffect(() => {
|
||||
if (prevDataRef.current !== data || prevIsErrorRef.current !== isError) {
|
||||
prevDataRef.current = data
|
||||
prevIsErrorRef.current = isError
|
||||
setExpandedPaths(computeInitialPaths(data, isError))
|
||||
}
|
||||
}, [data, isError])
|
||||
|
||||
const allMatchPaths = useMemo(() => {
|
||||
if (!searchQuery) return []
|
||||
if (isError) {
|
||||
const errorText = extractErrorMessage(data)
|
||||
const count = findTextMatches(errorText, searchQuery).length
|
||||
return Array(count).fill('root.error') as string[]
|
||||
}
|
||||
return collectAllMatchPaths(data, searchQuery, 'root')
|
||||
}, [data, searchQuery, isError])
|
||||
|
||||
useEffect(() => {
|
||||
onMatchCountChange?.(allMatchPaths.length)
|
||||
}, [allMatchPaths.length, onMatchCountChange])
|
||||
|
||||
const pathToMatchIndices = useMemo(() => buildPathToIndicesMap(allMatchPaths), [allMatchPaths])
|
||||
|
||||
// Auto-expand to current match
|
||||
useEffect(() => {
|
||||
if (
|
||||
allMatchPaths.length === 0 ||
|
||||
currentMatchIndex < 0 ||
|
||||
currentMatchIndex >= allMatchPaths.length
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const currentPath = allMatchPaths[currentMatchIndex]
|
||||
const pathsToExpand = [currentPath, ...getAncestorPaths(currentPath)]
|
||||
|
||||
setExpandedPaths((prev) => {
|
||||
if (pathsToExpand.every((p) => prev.has(p))) return prev
|
||||
const next = new Set(prev)
|
||||
pathsToExpand.forEach((p) => next.add(p))
|
||||
return next
|
||||
})
|
||||
}, [currentMatchIndex, allMatchPaths])
|
||||
|
||||
const handleToggle = useCallback((path: string) => {
|
||||
setExpandedPaths((prev) => {
|
||||
const next = new Set(prev)
|
||||
if (next.has(path)) {
|
||||
next.delete(path)
|
||||
} else {
|
||||
next.add(path)
|
||||
}
|
||||
return next
|
||||
})
|
||||
}, [])
|
||||
|
||||
const rootEntries = useMemo<NodeEntry[]>(() => {
|
||||
if (isPrimitive(data)) return [{ key: 'value', value: data, path: 'root.value' }]
|
||||
return buildEntries(data, 'root')
|
||||
}, [data])
|
||||
|
||||
const searchContextValue = useMemo<SearchContextValue | null>(() => {
|
||||
if (!searchQuery) return null
|
||||
return { query: searchQuery, pathToMatchIndices }
|
||||
}, [searchQuery, pathToMatchIndices])
|
||||
|
||||
const visibleRowCount = useMemo(
|
||||
() => countVisibleRows(data, expandedPaths, isError),
|
||||
[data, expandedPaths, isError]
|
||||
)
|
||||
const useVirtualization = visibleRowCount > CONFIG.VIRTUALIZATION_THRESHOLD
|
||||
|
||||
const flatRows = useMemo(() => {
|
||||
if (!useVirtualization) return []
|
||||
return flattenTree(data, expandedPaths, pathToMatchIndices, isError)
|
||||
}, [data, expandedPaths, pathToMatchIndices, isError, useVirtualization])
|
||||
|
||||
// Scroll to match (virtualized)
|
||||
useEffect(() => {
|
||||
if (!useVirtualization || allMatchPaths.length === 0 || !listRef.current) return
|
||||
|
||||
const currentPath = allMatchPaths[currentMatchIndex]
|
||||
const targetPath = currentPath.endsWith('.value') ? currentPath : `${currentPath}.value`
|
||||
const rowIndex = flatRows.findIndex((r) => r.path === targetPath || r.path === currentPath)
|
||||
|
||||
if (rowIndex !== -1) {
|
||||
listRef.current.scrollToRow({ index: rowIndex, align: 'center' })
|
||||
}
|
||||
}, [currentMatchIndex, allMatchPaths, flatRows, listRef, useVirtualization])
|
||||
|
||||
// Scroll to match (non-virtualized)
|
||||
useEffect(() => {
|
||||
if (useVirtualization || allMatchPaths.length === 0) return
|
||||
|
||||
const rafId = requestAnimationFrame(() => {
|
||||
const match = internalRef.current?.querySelector(
|
||||
`[data-match-index="${currentMatchIndex}"]`
|
||||
) as HTMLElement | null
|
||||
match?.scrollIntoView({ block: 'center', behavior: 'smooth' })
|
||||
})
|
||||
|
||||
return () => cancelAnimationFrame(rafId)
|
||||
}, [currentMatchIndex, allMatchPaths.length, expandedPaths, useVirtualization])
|
||||
|
||||
const containerClass = cn('flex flex-col pl-[20px]', wrapText && 'overflow-x-hidden', className)
|
||||
const virtualizedContainerClass = cn('relative', wrapText && 'overflow-x-hidden', className)
|
||||
const listClass = wrapText ? 'overflow-x-hidden' : 'overflow-x-auto'
|
||||
|
||||
// Running state
|
||||
if (isRunning && data === undefined) {
|
||||
return (
|
||||
<div ref={setContainerRef} className={containerClass}>
|
||||
<div className={STYLES.row}>
|
||||
<span className={STYLES.keyName}>running</span>
|
||||
<Badge variant='green' className={STYLES.badge}>
|
||||
Running
|
||||
</Badge>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Empty state
|
||||
if (rootEntries.length === 0 && !isError) {
|
||||
return (
|
||||
<div ref={setContainerRef} className={containerClass}>
|
||||
<span className={STYLES.emptyValue}>null</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Virtualized rendering
|
||||
if (useVirtualization) {
|
||||
return (
|
||||
<div
|
||||
ref={setContainerRef}
|
||||
className={virtualizedContainerClass}
|
||||
style={{ height: containerHeight }}
|
||||
>
|
||||
<List
|
||||
listRef={listRef}
|
||||
defaultHeight={containerHeight}
|
||||
rowCount={flatRows.length}
|
||||
rowHeight={CONFIG.ROW_HEIGHT}
|
||||
rowComponent={VirtualizedRow}
|
||||
rowProps={{
|
||||
rows: flatRows,
|
||||
onToggle: handleToggle,
|
||||
wrapText,
|
||||
searchQuery: searchQuery ?? '',
|
||||
currentMatchIndex,
|
||||
}}
|
||||
overscanCount={CONFIG.OVERSCAN_COUNT}
|
||||
className={listClass}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Non-virtualized rendering (preserves exact original styling)
|
||||
if (isError) {
|
||||
return (
|
||||
<SearchContext.Provider value={searchContextValue}>
|
||||
<div ref={setContainerRef} className={containerClass}>
|
||||
<StructuredNode
|
||||
name='error'
|
||||
value={extractErrorMessage(data)}
|
||||
path='root.error'
|
||||
expandedPaths={expandedPaths}
|
||||
onToggle={handleToggle}
|
||||
wrapText={wrapText}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
isError
|
||||
/>
|
||||
</div>
|
||||
</SearchContext.Provider>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<SearchContext.Provider value={searchContextValue}>
|
||||
<div ref={setContainerRef} className={containerClass}>
|
||||
{rootEntries.map((entry) => (
|
||||
<StructuredNode
|
||||
key={entry.path}
|
||||
name={entry.key}
|
||||
value={entry.value}
|
||||
path={entry.path}
|
||||
expandedPaths={expandedPaths}
|
||||
onToggle={handleToggle}
|
||||
wrapText={wrapText}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</SearchContext.Provider>
|
||||
)
|
||||
})
|
||||
@@ -1,4 +0,0 @@
|
||||
export { OutputContextMenu, type OutputContextMenuProps } from './components/output-context-menu'
|
||||
export { StructuredOutput, type StructuredOutputProps } from './components/structured-output'
|
||||
export type { OutputPanelProps } from './output-panel'
|
||||
export { OutputPanel } from './output-panel'
|
||||
@@ -1,643 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import {
|
||||
ArrowDown,
|
||||
ArrowDownToLine,
|
||||
ArrowUp,
|
||||
Check,
|
||||
Clipboard,
|
||||
Database,
|
||||
MoreHorizontal,
|
||||
Palette,
|
||||
Pause,
|
||||
Search,
|
||||
Trash2,
|
||||
X,
|
||||
} from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import {
|
||||
Button,
|
||||
Code,
|
||||
Input,
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverItem,
|
||||
PopoverTrigger,
|
||||
Tooltip,
|
||||
} from '@/components/emcn'
|
||||
import { FilterPopover } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/filter-popover'
|
||||
import { OutputContextMenu } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-panel/components/output-context-menu'
|
||||
import { StructuredOutput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-panel/components/structured-output'
|
||||
import { ToggleButton } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/toggle-button'
|
||||
import type {
|
||||
BlockInfo,
|
||||
TerminalFilters,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
|
||||
import type { ConsoleEntry } from '@/stores/terminal'
|
||||
import { useTerminalStore } from '@/stores/terminal'
|
||||
|
||||
interface OutputCodeContentProps {
|
||||
code: string
|
||||
language: 'javascript' | 'json'
|
||||
wrapText: boolean
|
||||
searchQuery: string | undefined
|
||||
currentMatchIndex: number
|
||||
onMatchCountChange: (count: number) => void
|
||||
contentRef: React.RefObject<HTMLDivElement | null>
|
||||
}
|
||||
|
||||
const OutputCodeContent = React.memo(function OutputCodeContent({
|
||||
code,
|
||||
language,
|
||||
wrapText,
|
||||
searchQuery,
|
||||
currentMatchIndex,
|
||||
onMatchCountChange,
|
||||
contentRef,
|
||||
}: OutputCodeContentProps) {
|
||||
return (
|
||||
<Code.Viewer
|
||||
code={code}
|
||||
showGutter
|
||||
language={language}
|
||||
className='m-0 min-h-full rounded-none border-0 bg-[var(--surface-1)] dark:bg-[var(--surface-1)]'
|
||||
paddingLeft={8}
|
||||
gutterStyle={{ backgroundColor: 'transparent' }}
|
||||
wrapText={wrapText}
|
||||
searchQuery={searchQuery}
|
||||
currentMatchIndex={currentMatchIndex}
|
||||
onMatchCountChange={onMatchCountChange}
|
||||
contentRef={contentRef}
|
||||
virtualized
|
||||
showCollapseColumn={language === 'json'}
|
||||
/>
|
||||
)
|
||||
})
|
||||
|
||||
/**
|
||||
* Props for the OutputPanel component
|
||||
* Store-backed settings (wrapText, openOnRun, structuredView, outputPanelWidth)
|
||||
* are accessed directly from useTerminalStore to reduce prop drilling.
|
||||
*/
|
||||
export interface OutputPanelProps {
|
||||
selectedEntry: ConsoleEntry
|
||||
handleOutputPanelResizeMouseDown: (e: React.MouseEvent) => void
|
||||
handleHeaderClick: () => void
|
||||
isExpanded: boolean
|
||||
expandToLastHeight: () => void
|
||||
showInput: boolean
|
||||
setShowInput: (show: boolean) => void
|
||||
hasInputData: boolean
|
||||
isPlaygroundEnabled: boolean
|
||||
shouldShowTrainingButton: boolean
|
||||
isTraining: boolean
|
||||
handleTrainingClick: (e: React.MouseEvent) => void
|
||||
showCopySuccess: boolean
|
||||
handleCopy: () => void
|
||||
filteredEntries: ConsoleEntry[]
|
||||
handleExportConsole: (e: React.MouseEvent) => void
|
||||
hasActiveFilters: boolean
|
||||
handleClearConsole: (e: React.MouseEvent) => void
|
||||
shouldShowCodeDisplay: boolean
|
||||
outputDataStringified: string
|
||||
outputData: unknown
|
||||
handleClearConsoleFromMenu: () => void
|
||||
filters: TerminalFilters
|
||||
toggleBlock: (blockId: string) => void
|
||||
toggleStatus: (status: 'error' | 'info') => void
|
||||
uniqueBlocks: BlockInfo[]
|
||||
}
|
||||
|
||||
/**
 * Output panel component that manages its own search state.
 * Accesses store-backed settings directly to reduce prop drilling.
 *
 * Renders the right-hand terminal panel: a header with output/input tabs and
 * a toolbar (filters, search, copy, export, clear, options), an optional
 * search overlay, and the content area (input code, structured output, or
 * raw JSON), plus a right-click context menu.
 */
export const OutputPanel = React.memo(function OutputPanel({
  selectedEntry,
  handleOutputPanelResizeMouseDown,
  handleHeaderClick,
  isExpanded,
  expandToLastHeight,
  showInput,
  setShowInput,
  hasInputData,
  isPlaygroundEnabled,
  shouldShowTrainingButton,
  isTraining,
  handleTrainingClick,
  showCopySuccess,
  handleCopy,
  filteredEntries,
  handleExportConsole,
  hasActiveFilters,
  handleClearConsole,
  shouldShowCodeDisplay,
  outputDataStringified,
  outputData,
  handleClearConsoleFromMenu,
  filters,
  toggleBlock,
  toggleStatus,
  uniqueBlocks,
}: OutputPanelProps) {
  // Access store-backed settings directly to reduce prop drilling
  const outputPanelWidth = useTerminalStore((state) => state.outputPanelWidth)
  const wrapText = useTerminalStore((state) => state.wrapText)
  const setWrapText = useTerminalStore((state) => state.setWrapText)
  const openOnRun = useTerminalStore((state) => state.openOnRun)
  const setOpenOnRun = useTerminalStore((state) => state.setOpenOnRun)
  const structuredView = useTerminalStore((state) => state.structuredView)
  const setStructuredView = useTerminalStore((state) => state.setStructuredView)

  // Scroll container for the rendered output; shared with the search features hook
  const outputContentRef = useRef<HTMLDivElement>(null)
  const [filtersOpen, setFiltersOpen] = useState(false)
  const [outputOptionsOpen, setOutputOptionsOpen] = useState(false)
  // Search state (query, match navigation) for the output content
  const {
    isSearchActive: isOutputSearchActive,
    searchQuery: outputSearchQuery,
    setSearchQuery: setOutputSearchQuery,
    matchCount,
    currentMatchIndex,
    activateSearch: activateOutputSearch,
    closeSearch: closeOutputSearch,
    goToNextMatch,
    goToPreviousMatch,
    handleMatchCountChange,
    searchInputRef: outputSearchInputRef,
  } = useCodeViewerFeatures({
    contentRef: outputContentRef,
    externalWrapText: wrapText,
    onWrapTextChange: setWrapText,
  })

  // Context menu state for output panel
  const [hasSelection, setHasSelection] = useState(false)
  const [storedSelectionText, setStoredSelectionText] = useState('')
  const {
    isOpen: isOutputMenuOpen,
    position: outputMenuPosition,
    menuRef: outputMenuRef,
    handleContextMenu: handleOutputContextMenu,
    closeMenu: closeOutputMenu,
  } = useContextMenu()

  // Snapshot the current text selection before opening the context menu, so
  // "Copy selection" still works after the menu itself changes focus.
  const handleOutputPanelContextMenu = useCallback(
    (e: React.MouseEvent) => {
      const selection = window.getSelection()
      const selectionText = selection?.toString() || ''
      setStoredSelectionText(selectionText)
      setHasSelection(selectionText.length > 0)
      handleOutputContextMenu(e)
    },
    [handleOutputContextMenu]
  )

  // Copies the selection captured at context-menu-open time (not the live one)
  const handleCopySelection = useCallback(() => {
    if (storedSelectionText) {
      navigator.clipboard.writeText(storedSelectionText)
    }
  }, [storedSelectionText])

  // Memoized callbacks to avoid inline arrow functions
  const handleToggleStructuredView = useCallback(() => {
    setStructuredView(!structuredView)
  }, [structuredView, setStructuredView])

  const handleToggleWrapText = useCallback(() => {
    setWrapText(!wrapText)
  }, [wrapText, setWrapText])

  const handleToggleOpenOnRun = useCallback(() => {
    setOpenOnRun(!openOnRun)
  }, [openOnRun, setOpenOnRun])

  // stopPropagation on the toolbar buttons prevents the click from also
  // triggering the header's own onClick (handleHeaderClick).
  const handleCopyClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      handleCopy()
    },
    [handleCopy]
  )

  const handleSearchClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      activateOutputSearch()
    },
    [activateOutputSearch]
  )

  const handleCloseSearchClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      closeOutputSearch()
    },
    [closeOutputSearch]
  )

  // "Output" tab: expand the terminal if collapsed, then leave input view
  const handleOutputButtonClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      if (!isExpanded) {
        expandToLastHeight()
      }
      if (showInput) setShowInput(false)
    },
    [isExpanded, expandToLastHeight, showInput, setShowInput]
  )

  // "Input" tab: expand the terminal if collapsed, then switch to input view
  const handleInputButtonClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      if (!isExpanded) {
        expandToLastHeight()
      }
      setShowInput(true)
    },
    [isExpanded, expandToLastHeight, setShowInput]
  )

  const handleToggleButtonClick = useCallback(
    (e: React.MouseEvent) => {
      e.stopPropagation()
      handleHeaderClick()
    },
    [handleHeaderClick]
  )

  /**
   * Track text selection state for context menu.
   * Skip updates when the context menu is open to prevent the selection
   * state from changing mid-click (which would disable the copy button).
   */
  useEffect(() => {
    const handleSelectionChange = () => {
      if (isOutputMenuOpen) return

      const selection = window.getSelection()
      setHasSelection(Boolean(selection && selection.toString().length > 0))
    }

    document.addEventListener('selectionchange', handleSelectionChange)
    return () => document.removeEventListener('selectionchange', handleSelectionChange)
  }, [isOutputMenuOpen])

  // Memoize the search query for structured output to avoid re-renders
  // (undefined when search is inactive so children can skip highlight work)
  const structuredSearchQuery = useMemo(
    () => (isOutputSearchActive ? outputSearchQuery : undefined),
    [isOutputSearchActive, outputSearchQuery]
  )

  return (
    <>
      <div
        className='absolute top-0 right-0 bottom-0 flex flex-col border-[var(--border)] border-l bg-[var(--surface-1)]'
        style={{ width: `${outputPanelWidth}px` }}
      >
        {/* Horizontal Resize Handle */}
        <div
          className='-ml-[4px] absolute top-0 bottom-0 left-0 z-20 w-[8px] cursor-ew-resize'
          onMouseDown={handleOutputPanelResizeMouseDown}
          role='separator'
          aria-label='Resize output panel'
          aria-orientation='vertical'
        />

        {/* Header */}
        <div
          className='group flex h-[30px] flex-shrink-0 cursor-pointer items-center justify-between bg-[var(--surface-1)] pr-[16px] pl-[10px]'
          onClick={handleHeaderClick}
        >
          {/* Output / Input tab buttons */}
          <div className='flex items-center'>
            <Button
              variant='ghost'
              className={clsx(
                'px-[8px] py-[6px] text-[12px]',
                !showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
              )}
              onClick={handleOutputButtonClick}
              aria-label='Show output'
            >
              Output
            </Button>
            {hasInputData && (
              <Button
                variant='ghost'
                className={clsx(
                  'px-[8px] py-[6px] text-[12px]',
                  showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
                )}
                onClick={handleInputButtonClick}
                aria-label='Show input'
              >
                Input
              </Button>
            )}
          </div>
          {/* Toolbar: filters, search, playground, training, copy, export, clear, options */}
          <div className='flex flex-shrink-0 items-center gap-[8px]'>
            {/* Unified filter popover */}
            {filteredEntries.length > 0 && (
              <FilterPopover
                open={filtersOpen}
                onOpenChange={setFiltersOpen}
                filters={filters}
                toggleStatus={toggleStatus}
                toggleBlock={toggleBlock}
                uniqueBlocks={uniqueBlocks}
                hasActiveFilters={hasActiveFilters}
              />
            )}

            {isOutputSearchActive ? (
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button
                    variant='ghost'
                    onClick={handleCloseSearchClick}
                    aria-label='Close search'
                    className='!p-1.5 -m-1.5'
                  >
                    <X className='h-[12px] w-[12px]' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  <span>Close search</span>
                </Tooltip.Content>
              </Tooltip.Root>
            ) : (
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button
                    variant='ghost'
                    onClick={handleSearchClick}
                    aria-label='Search in output'
                    className='!p-1.5 -m-1.5'
                  >
                    <Search className='h-[12px] w-[12px]' />
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  <span>Search</span>
                </Tooltip.Content>
              </Tooltip.Root>
            )}

            {isPlaygroundEnabled && (
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Link href='/playground'>
                    <Button
                      variant='ghost'
                      aria-label='Component Playground'
                      className='!p-1.5 -m-1.5'
                    >
                      <Palette className='h-[12px] w-[12px]' />
                    </Button>
                  </Link>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  <span>Component Playground</span>
                </Tooltip.Content>
              </Tooltip.Root>
            )}

            {shouldShowTrainingButton && (
              <Tooltip.Root>
                <Tooltip.Trigger asChild>
                  <Button
                    variant='ghost'
                    onClick={handleTrainingClick}
                    aria-label={isTraining ? 'Stop training' : 'Train Copilot'}
                    className={clsx(
                      '!p-1.5 -m-1.5',
                      isTraining && 'text-orange-600 dark:text-orange-400'
                    )}
                  >
                    {isTraining ? (
                      <Pause className='h-[12px] w-[12px]' />
                    ) : (
                      <Database className='h-[12px] w-[12px]' />
                    )}
                  </Button>
                </Tooltip.Trigger>
                <Tooltip.Content>
                  <span>{isTraining ? 'Stop Training' : 'Train Copilot'}</span>
                </Tooltip.Content>
              </Tooltip.Root>
            )}

            <Tooltip.Root>
              <Tooltip.Trigger asChild>
                <Button
                  variant='ghost'
                  onClick={handleCopyClick}
                  aria-label='Copy output'
                  className='!p-1.5 -m-1.5'
                >
                  {showCopySuccess ? (
                    <Check className='h-[12px] w-[12px]' />
                  ) : (
                    <Clipboard className='h-[12px] w-[12px]' />
                  )}
                </Button>
              </Tooltip.Trigger>
              <Tooltip.Content>
                <span>{showCopySuccess ? 'Copied' : 'Copy output'}</span>
              </Tooltip.Content>
            </Tooltip.Root>
            {/* Export and clear only make sense when there are entries */}
            {filteredEntries.length > 0 && (
              <>
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <Button
                      variant='ghost'
                      onClick={handleExportConsole}
                      aria-label='Download console CSV'
                      className='!p-1.5 -m-1.5'
                    >
                      <ArrowDownToLine className='h-3 w-3' />
                    </Button>
                  </Tooltip.Trigger>
                  <Tooltip.Content>
                    <span>Download CSV</span>
                  </Tooltip.Content>
                </Tooltip.Root>
                <Tooltip.Root>
                  <Tooltip.Trigger asChild>
                    <Button
                      variant='ghost'
                      onClick={handleClearConsole}
                      aria-label='Clear console'
                      className='!p-1.5 -m-1.5'
                    >
                      <Trash2 className='h-3 w-3' />
                    </Button>
                  </Tooltip.Trigger>
                  <Tooltip.Content>
                    <Tooltip.Shortcut keys='⌘D'>Clear console</Tooltip.Shortcut>
                  </Tooltip.Content>
                </Tooltip.Root>
              </>
            )}
            {/* Options popover: structured view / wrap text / open on run */}
            <Popover open={outputOptionsOpen} onOpenChange={setOutputOptionsOpen} size='sm'>
              <PopoverTrigger asChild>
                <Button
                  variant='ghost'
                  onClick={(e) => e.stopPropagation()}
                  aria-label='Terminal options'
                  className='!p-1.5 -m-1.5'
                >
                  <MoreHorizontal className='h-3.5 w-3.5' />
                </Button>
              </PopoverTrigger>
              <PopoverContent
                side='bottom'
                align='end'
                sideOffset={4}
                collisionPadding={0}
                onClick={(e) => e.stopPropagation()}
                style={{ minWidth: '140px', maxWidth: '160px' }}
                className='gap-[2px]'
              >
                <PopoverItem
                  active={structuredView}
                  showCheck={structuredView}
                  onClick={handleToggleStructuredView}
                >
                  <span>Structured view</span>
                </PopoverItem>
                <PopoverItem active={wrapText} showCheck={wrapText} onClick={handleToggleWrapText}>
                  <span>Wrap text</span>
                </PopoverItem>
                <PopoverItem
                  active={openOnRun}
                  showCheck={openOnRun}
                  onClick={handleToggleOpenOnRun}
                >
                  <span>Open on run</span>
                </PopoverItem>
              </PopoverContent>
            </Popover>
            <ToggleButton isExpanded={isExpanded} onClick={handleToggleButtonClick} />
          </div>
        </div>

        {/* Search Overlay */}
        {isOutputSearchActive && (
          <div
            className='absolute top-[30px] right-[8px] z-30 flex h-[34px] items-center gap-[6px] rounded-b-[4px] border border-[var(--border)] border-t-0 bg-[var(--surface-1)] px-[6px] shadow-sm'
            onClick={(e) => e.stopPropagation()}
            data-toolbar-root
            data-search-active='true'
          >
            <Input
              ref={outputSearchInputRef}
              type='text'
              value={outputSearchQuery}
              onChange={(e) => setOutputSearchQuery(e.target.value)}
              placeholder='Search...'
              className='mr-[2px] h-[23px] w-[94px] text-[12px]'
            />
            {/* Match counter, e.g. "2/7" or "No results" */}
            <span
              className={clsx(
                'w-[58px] font-medium text-[11px]',
                matchCount > 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]'
              )}
            >
              {matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : 'No results'}
            </span>
            <Button
              variant='ghost'
              onClick={goToPreviousMatch}
              aria-label='Previous match'
              className='!p-1.5 -m-1.5'
              disabled={matchCount === 0}
            >
              <ArrowUp className='h-[12px] w-[12px]' />
            </Button>
            <Button
              variant='ghost'
              onClick={goToNextMatch}
              aria-label='Next match'
              className='!p-1.5 -m-1.5'
              disabled={matchCount === 0}
            >
              <ArrowDown className='h-[12px] w-[12px]' />
            </Button>
            <Button
              variant='ghost'
              onClick={closeOutputSearch}
              aria-label='Close search'
              className='!p-1.5 -m-1.5'
            >
              <X className='h-[12px] w-[12px]' />
            </Button>
          </div>
        )}

        {/* Content: input code, structured output, or raw JSON */}
        <div
          className={clsx('flex-1 overflow-y-auto', !wrapText && 'overflow-x-auto')}
          onContextMenu={handleOutputPanelContextMenu}
        >
          {shouldShowCodeDisplay ? (
            <OutputCodeContent
              code={selectedEntry.input.code}
              language={(selectedEntry.input.language as 'javascript' | 'json') || 'javascript'}
              wrapText={wrapText}
              searchQuery={structuredSearchQuery}
              currentMatchIndex={currentMatchIndex}
              onMatchCountChange={handleMatchCountChange}
              contentRef={outputContentRef}
            />
          ) : structuredView ? (
            <StructuredOutput
              data={outputData}
              wrapText={wrapText}
              isError={!showInput && Boolean(selectedEntry.error)}
              isRunning={!showInput && Boolean(selectedEntry.isRunning)}
              className='min-h-full'
              searchQuery={structuredSearchQuery}
              currentMatchIndex={currentMatchIndex}
              onMatchCountChange={handleMatchCountChange}
              contentRef={outputContentRef}
            />
          ) : (
            <OutputCodeContent
              code={outputDataStringified}
              language='json'
              wrapText={wrapText}
              searchQuery={structuredSearchQuery}
              currentMatchIndex={currentMatchIndex}
              onMatchCountChange={handleMatchCountChange}
              contentRef={outputContentRef}
            />
          )}
        </div>
      </div>

      {/* Output Panel Context Menu */}
      <OutputContextMenu
        isOpen={isOutputMenuOpen}
        position={outputMenuPosition}
        menuRef={outputMenuRef}
        onClose={closeOutputMenu}
        onCopySelection={handleCopySelection}
        onCopyAll={handleCopy}
        onSearch={activateOutputSearch}
        structuredView={structuredView}
        onToggleStructuredView={handleToggleStructuredView}
        wrapText={wrapText}
        onToggleWrap={handleToggleWrapText}
        openOnRun={openOnRun}
        onToggleOpenOnRun={handleToggleOpenOnRun}
        onClearConsole={handleClearConsoleFromMenu}
        hasSelection={hasSelection}
      />
    </>
  )
})
|
||||
@@ -1 +0,0 @@
|
||||
export { RunningBadge, StatusDisplay, type StatusDisplayProps } from './status-display'
|
||||
@@ -1,43 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import { memo } from 'react'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { BADGE_STYLE } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
|
||||
/**
 * Running badge component - displays a consistent "Running" indicator
 * (green badge with the shared terminal badge styling).
 */
export const RunningBadge = memo(function RunningBadge() {
  return (
    <Badge variant='green' className={BADGE_STYLE}>
      Running
    </Badge>
  )
})
|
||||
|
||||
/**
 * Props for StatusDisplay component
 */
export interface StatusDisplayProps {
  /** Whether the execution is still in progress */
  isRunning: boolean
  /** Whether the execution was canceled */
  isCanceled: boolean
  /** Pre-formatted duration string shown for completed executions */
  formattedDuration: string
}
|
||||
|
||||
/**
|
||||
* Reusable status display for terminal rows.
|
||||
* Shows Running badge, 'canceled' text, or formatted duration.
|
||||
*/
|
||||
export const StatusDisplay = memo(function StatusDisplay({
|
||||
isRunning,
|
||||
isCanceled,
|
||||
formattedDuration,
|
||||
}: StatusDisplayProps) {
|
||||
if (isRunning) {
|
||||
return <RunningBadge />
|
||||
}
|
||||
if (isCanceled) {
|
||||
return <>canceled</>
|
||||
}
|
||||
return <>{formattedDuration}</>
|
||||
})
|
||||
@@ -1 +0,0 @@
|
||||
export { ToggleButton, type ToggleButtonProps } from './toggle-button'
|
||||
@@ -1,33 +0,0 @@
|
||||
'use client'
|
||||
|
||||
import type React from 'react'
|
||||
import { memo } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronDown } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
export interface ToggleButtonProps {
  /** Whether the terminal is currently expanded (controls chevron rotation) */
  isExpanded: boolean
  /** Click handler invoked when the toggle is pressed */
  onClick: (e: React.MouseEvent) => void
}

/**
 * Toggle button component for terminal expand/collapse.
 * Renders a chevron that points down when expanded and rotates 180°
 * when collapsed.
 */
export const ToggleButton = memo(function ToggleButton({ isExpanded, onClick }: ToggleButtonProps) {
  return (
    <Button
      variant='ghost'
      className='!p-1.5 -m-1.5'
      onClick={onClick}
      aria-label='Toggle terminal'
    >
      <ChevronDown
        className={clsx(
          'h-3.5 w-3.5 flex-shrink-0 transition-transform duration-100',
          !isExpanded && 'rotate-180'
        )}
      />
    </Button>
  )
})
|
||||
@@ -1,4 +1,3 @@
|
||||
export type { SortConfig, SortDirection, SortField, TerminalFilters } from '../types'
|
||||
export { useOutputPanelResize } from './use-output-panel-resize'
|
||||
export { useTerminalFilters } from './use-terminal-filters'
|
||||
export { useTerminalResize } from './use-terminal-resize'
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { OUTPUT_PANEL_WIDTH, TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
|
||||
import { OUTPUT_PANEL_WIDTH } from '@/stores/constants'
|
||||
import { useTerminalStore } from '@/stores/terminal'
|
||||
|
||||
const BLOCK_COLUMN_WIDTH = 240
|
||||
|
||||
export function useOutputPanelResize() {
|
||||
const setOutputPanelWidth = useTerminalStore((state) => state.setOutputPanelWidth)
|
||||
const [isResizing, setIsResizing] = useState(false)
|
||||
@@ -23,7 +25,7 @@ export function useOutputPanelResize() {
|
||||
|
||||
const newWidth = window.innerWidth - e.clientX - panelWidth
|
||||
const terminalWidth = window.innerWidth - sidebarWidth - panelWidth
|
||||
const maxWidth = terminalWidth - TERMINAL_BLOCK_COLUMN_WIDTH
|
||||
const maxWidth = terminalWidth - BLOCK_COLUMN_WIDTH
|
||||
const clampedWidth = Math.max(OUTPUT_PANEL_WIDTH.MIN, Math.min(newWidth, maxWidth))
|
||||
|
||||
setOutputPanelWidth(clampedWidth)
|
||||
|
||||
@@ -1,10 +1,26 @@
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import type {
|
||||
SortConfig,
|
||||
TerminalFilters,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
|
||||
import type { ConsoleEntry } from '@/stores/terminal'
|
||||
|
||||
/**
|
||||
* Sort configuration
|
||||
*/
|
||||
export type SortField = 'timestamp'
|
||||
export type SortDirection = 'asc' | 'desc'
|
||||
|
||||
export interface SortConfig {
|
||||
field: SortField
|
||||
direction: SortDirection
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter configuration state
|
||||
*/
|
||||
export interface TerminalFilters {
|
||||
blockIds: Set<string>
|
||||
statuses: Set<'error' | 'info'>
|
||||
runIds: Set<string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom hook to manage terminal filters and sorting.
|
||||
* Provides filter state, sort state, and filtering/sorting logic for console entries.
|
||||
@@ -15,6 +31,7 @@ export function useTerminalFilters() {
|
||||
const [filters, setFilters] = useState<TerminalFilters>({
|
||||
blockIds: new Set(),
|
||||
statuses: new Set(),
|
||||
runIds: new Set(),
|
||||
})
|
||||
|
||||
const [sortConfig, setSortConfig] = useState<SortConfig>({
|
||||
@@ -52,6 +69,21 @@ export function useTerminalFilters() {
|
||||
})
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Toggles a run ID filter
|
||||
*/
|
||||
const toggleRunId = useCallback((runId: string) => {
|
||||
setFilters((prev) => {
|
||||
const newRunIds = new Set(prev.runIds)
|
||||
if (newRunIds.has(runId)) {
|
||||
newRunIds.delete(runId)
|
||||
} else {
|
||||
newRunIds.add(runId)
|
||||
}
|
||||
return { ...prev, runIds: newRunIds }
|
||||
})
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Toggles sort direction between ascending and descending
|
||||
*/
|
||||
@@ -69,6 +101,7 @@ export function useTerminalFilters() {
|
||||
setFilters({
|
||||
blockIds: new Set(),
|
||||
statuses: new Set(),
|
||||
runIds: new Set(),
|
||||
})
|
||||
}, [])
|
||||
|
||||
@@ -76,7 +109,7 @@ export function useTerminalFilters() {
|
||||
* Checks if any filters are active
|
||||
*/
|
||||
const hasActiveFilters = useMemo(() => {
|
||||
return filters.blockIds.size > 0 || filters.statuses.size > 0
|
||||
return filters.blockIds.size > 0 || filters.statuses.size > 0 || filters.runIds.size > 0
|
||||
}, [filters])
|
||||
|
||||
/**
|
||||
@@ -101,6 +134,14 @@ export function useTerminalFilters() {
|
||||
if (!hasStatus) return false
|
||||
}
|
||||
|
||||
// Run ID filter
|
||||
if (
|
||||
filters.runIds.size > 0 &&
|
||||
(!entry.executionId || !filters.runIds.has(entry.executionId))
|
||||
) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
}
|
||||
@@ -123,6 +164,7 @@ export function useTerminalFilters() {
|
||||
sortConfig,
|
||||
toggleBlock,
|
||||
toggleStatus,
|
||||
toggleRunId,
|
||||
toggleSort,
|
||||
clearFilters,
|
||||
hasActiveFilters,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,64 +0,0 @@
|
||||
/**
 * Terminal filter configuration state
 */
export interface TerminalFilters {
  // Block ids to include; empty set means "no block filter"
  blockIds: Set<string>
  // Statuses to include; empty set means "no status filter"
  statuses: Set<'error' | 'info'>
}

/**
 * Context menu position for positioning floating menus
 * (viewport coordinates in pixels)
 */
export interface ContextMenuPosition {
  x: number
  y: number
}

/**
 * Sort field options for terminal entries
 */
export type SortField = 'timestamp'

/**
 * Sort direction options
 */
export type SortDirection = 'asc' | 'desc'

/**
 * Sort configuration for terminal entries
 */
export interface SortConfig {
  field: SortField
  direction: SortDirection
}

/**
 * Status type for console entries
 */
export type EntryStatus = 'error' | 'info'

/**
 * Block information for filters
 */
export interface BlockInfo {
  blockId: string
  blockName: string
  blockType: string
}

/**
 * Common row styling classes for terminal components
 */
export const ROW_STYLES = {
  base: 'group flex cursor-pointer items-center justify-between gap-[8px] rounded-[8px] px-[6px]',
  selected: 'bg-[var(--surface-6)] dark:bg-[var(--surface-5)]',
  hover: 'hover:bg-[var(--surface-6)] dark:hover:bg-[var(--surface-5)]',
  nested:
    'mt-[2px] ml-[3px] flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[9px]',
  iconButton: '!p-1.5 -m-1.5',
} as const

/**
 * Common badge styling for status badges
 */
export const BADGE_STYLE = 'rounded-[4px] px-[4px] py-[0px] text-[11px]'
|
||||
@@ -1,452 +0,0 @@
|
||||
import type React from 'react'
|
||||
import { RepeatIcon, SplitIcon } from 'lucide-react'
|
||||
import { getBlock } from '@/blocks'
|
||||
import { TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
|
||||
import type { ConsoleEntry } from '@/stores/terminal'
|
||||
|
||||
/**
 * Subflow colors matching the subflow tool configs
 * (used as fallbacks for loop/parallel blocks that have no block config)
 */
const SUBFLOW_COLORS = {
  loop: '#2FB3FF',
  parallel: '#FEE12B',
} as const
|
||||
|
||||
/**
|
||||
* Retrieves the icon component for a given block type
|
||||
*/
|
||||
export function getBlockIcon(
|
||||
blockType: string
|
||||
): React.ComponentType<{ className?: string }> | null {
|
||||
const blockConfig = getBlock(blockType)
|
||||
|
||||
if (blockConfig?.icon) {
|
||||
return blockConfig.icon
|
||||
}
|
||||
|
||||
if (blockType === 'loop') {
|
||||
return RepeatIcon
|
||||
}
|
||||
|
||||
if (blockType === 'parallel') {
|
||||
return SplitIcon
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the background color for a block type
|
||||
*/
|
||||
export function getBlockColor(blockType: string): string {
|
||||
const blockConfig = getBlock(blockType)
|
||||
if (blockConfig?.bgColor) {
|
||||
return blockConfig.bgColor
|
||||
}
|
||||
// Use proper subflow colors matching the toolbar configs
|
||||
if (blockType === 'loop') {
|
||||
return SUBFLOW_COLORS.loop
|
||||
}
|
||||
if (blockType === 'parallel') {
|
||||
return SUBFLOW_COLORS.parallel
|
||||
}
|
||||
return '#6b7280'
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats duration from milliseconds to readable format
|
||||
*/
|
||||
export function formatDuration(ms?: number): string {
|
||||
if (ms === undefined || ms === null) return '-'
|
||||
if (ms < 1000) return `${ms}ms`
|
||||
return `${(ms / 1000).toFixed(2)}s`
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if a keyboard event originated from a text-editable element
|
||||
*/
|
||||
export function isEventFromEditableElement(e: KeyboardEvent): boolean {
|
||||
const target = e.target as HTMLElement | null
|
||||
if (!target) return false
|
||||
|
||||
const isEditable = (el: HTMLElement | null): boolean => {
|
||||
if (!el) return false
|
||||
if (el instanceof HTMLInputElement) return true
|
||||
if (el instanceof HTMLTextAreaElement) return true
|
||||
if ((el as HTMLElement).isContentEditable) return true
|
||||
const role = el.getAttribute('role')
|
||||
if (role === 'textbox' || role === 'combobox') return true
|
||||
return false
|
||||
}
|
||||
|
||||
let el: HTMLElement | null = target
|
||||
while (el) {
|
||||
if (isEditable(el)) return true
|
||||
el = el.parentElement
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block type is a subflow (loop or parallel)
|
||||
*/
|
||||
export function isSubflowBlockType(blockType: string): boolean {
|
||||
const lower = blockType?.toLowerCase() || ''
|
||||
return lower === 'loop' || lower === 'parallel'
|
||||
}
|
||||
|
||||
/**
 * Node type for the tree structure
 */
export type EntryNodeType = 'block' | 'subflow' | 'iteration'

/**
 * Entry node for tree structure - represents a block, subflow, or iteration
 */
export interface EntryNode {
  /** The console entry (for blocks) or synthetic entry (for subflows/iterations) */
  entry: ConsoleEntry
  /** Child nodes */
  children: EntryNode[]
  /** Node type */
  nodeType: EntryNodeType
  /** Iteration info for iteration nodes */
  iterationInfo?: {
    // 1-based? derived from entry.iterationCurrent — TODO confirm indexing with producer
    current: number
    total?: number
  }
}

/**
 * Execution group interface for grouping entries by execution
 */
export interface ExecutionGroup {
  executionId: string
  // ISO timestamp strings for display
  startTime: string
  endTime: string
  // Millisecond epoch equivalents for sorting/duration math
  startTimeMs: number
  endTimeMs: number
  duration: number
  status: 'success' | 'error'
  /** Flat list of entries (legacy, kept for filters) */
  entries: ConsoleEntry[]
  /** Tree structure of entry nodes for nested display */
  entryTree: EntryNode[]
}

/**
 * Iteration group for grouping blocks within the same iteration
 * (keyed by iterationType + iterationCurrent when building the tree)
 */
interface IterationGroup {
  iterationType: string
  iterationCurrent: number
  iterationTotal?: number
  // All console entries that executed within this iteration
  blocks: ConsoleEntry[]
  // Earliest start time among the grouped blocks (ms epoch)
  startTimeMs: number
}
|
||||
|
||||
/**
|
||||
* Builds a tree structure from flat entries.
|
||||
* Groups iteration entries by (iterationType, iterationCurrent), showing all blocks
|
||||
* that executed within each iteration.
|
||||
* Sorts by start time to ensure chronological order.
|
||||
*/
|
||||
function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
|
||||
// Separate regular blocks from iteration entries
|
||||
const regularBlocks: ConsoleEntry[] = []
|
||||
const iterationEntries: ConsoleEntry[] = []
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.iterationType && entry.iterationCurrent !== undefined) {
|
||||
iterationEntries.push(entry)
|
||||
} else {
|
||||
regularBlocks.push(entry)
|
||||
}
|
||||
}
|
||||
|
||||
// Group iteration entries by (iterationType, iterationCurrent)
|
||||
const iterationGroupsMap = new Map<string, IterationGroup>()
|
||||
for (const entry of iterationEntries) {
|
||||
const key = `${entry.iterationType}-${entry.iterationCurrent}`
|
||||
let group = iterationGroupsMap.get(key)
|
||||
const entryStartMs = new Date(entry.startedAt || entry.timestamp).getTime()
|
||||
|
||||
if (!group) {
|
||||
group = {
|
||||
iterationType: entry.iterationType!,
|
||||
iterationCurrent: entry.iterationCurrent!,
|
||||
iterationTotal: entry.iterationTotal,
|
||||
blocks: [],
|
||||
startTimeMs: entryStartMs,
|
||||
}
|
||||
iterationGroupsMap.set(key, group)
|
||||
} else {
|
||||
// Update start time to earliest
|
||||
if (entryStartMs < group.startTimeMs) {
|
||||
group.startTimeMs = entryStartMs
|
||||
}
|
||||
// Update total if available
|
||||
if (entry.iterationTotal !== undefined) {
|
||||
group.iterationTotal = entry.iterationTotal
|
||||
}
|
||||
}
|
||||
group.blocks.push(entry)
|
||||
}
|
||||
|
||||
// Sort blocks within each iteration by start time ascending (oldest first, top-down)
|
||||
for (const group of iterationGroupsMap.values()) {
|
||||
group.blocks.sort((a, b) => {
|
||||
const aStart = new Date(a.startedAt || a.timestamp).getTime()
|
||||
const bStart = new Date(b.startedAt || b.timestamp).getTime()
|
||||
return aStart - bStart
|
||||
})
|
||||
}
|
||||
|
||||
// Group iterations by iterationType to create subflow parents
|
||||
const subflowGroups = new Map<string, IterationGroup[]>()
|
||||
for (const group of iterationGroupsMap.values()) {
|
||||
const type = group.iterationType
|
||||
let groups = subflowGroups.get(type)
|
||||
if (!groups) {
|
||||
groups = []
|
||||
subflowGroups.set(type, groups)
|
||||
}
|
||||
groups.push(group)
|
||||
}
|
||||
|
||||
// Sort iterations within each subflow by iteration number
|
||||
for (const groups of subflowGroups.values()) {
|
||||
groups.sort((a, b) => a.iterationCurrent - b.iterationCurrent)
|
||||
}
|
||||
|
||||
// Build subflow nodes with iteration children
|
||||
const subflowNodes: EntryNode[] = []
|
||||
for (const [iterationType, iterationGroups] of subflowGroups.entries()) {
|
||||
// Calculate subflow timing from all its iterations
|
||||
const firstIteration = iterationGroups[0]
|
||||
const allBlocks = iterationGroups.flatMap((g) => g.blocks)
|
||||
const subflowStartMs = Math.min(
|
||||
...allBlocks.map((b) => new Date(b.startedAt || b.timestamp).getTime())
|
||||
)
|
||||
const subflowEndMs = Math.max(
|
||||
...allBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
|
||||
)
|
||||
const totalDuration = allBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
|
||||
|
||||
// Create synthetic subflow parent entry
|
||||
const syntheticSubflow: ConsoleEntry = {
|
||||
id: `subflow-${iterationType}-${firstIteration.blocks[0]?.executionId || 'unknown'}`,
|
||||
timestamp: new Date(subflowStartMs).toISOString(),
|
||||
workflowId: firstIteration.blocks[0]?.workflowId || '',
|
||||
blockId: `${iterationType}-container`,
|
||||
blockName: iterationType.charAt(0).toUpperCase() + iterationType.slice(1),
|
||||
blockType: iterationType,
|
||||
executionId: firstIteration.blocks[0]?.executionId,
|
||||
startedAt: new Date(subflowStartMs).toISOString(),
|
||||
endedAt: new Date(subflowEndMs).toISOString(),
|
||||
durationMs: totalDuration,
|
||||
success: !allBlocks.some((b) => b.error),
|
||||
}
|
||||
|
||||
// Build iteration child nodes
|
||||
const iterationNodes: EntryNode[] = iterationGroups.map((iterGroup) => {
|
||||
// Create synthetic iteration entry
|
||||
const iterBlocks = iterGroup.blocks
|
||||
const iterStartMs = Math.min(
|
||||
...iterBlocks.map((b) => new Date(b.startedAt || b.timestamp).getTime())
|
||||
)
|
||||
const iterEndMs = Math.max(
|
||||
...iterBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
|
||||
)
|
||||
const iterDuration = iterBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
|
||||
|
||||
const syntheticIteration: ConsoleEntry = {
|
||||
id: `iteration-${iterationType}-${iterGroup.iterationCurrent}-${iterBlocks[0]?.executionId || 'unknown'}`,
|
||||
timestamp: new Date(iterStartMs).toISOString(),
|
||||
workflowId: iterBlocks[0]?.workflowId || '',
|
||||
blockId: `iteration-${iterGroup.iterationCurrent}`,
|
||||
blockName: `Iteration ${iterGroup.iterationCurrent}${iterGroup.iterationTotal !== undefined ? ` / ${iterGroup.iterationTotal}` : ''}`,
|
||||
blockType: iterationType,
|
||||
executionId: iterBlocks[0]?.executionId,
|
||||
startedAt: new Date(iterStartMs).toISOString(),
|
||||
endedAt: new Date(iterEndMs).toISOString(),
|
||||
durationMs: iterDuration,
|
||||
success: !iterBlocks.some((b) => b.error),
|
||||
iterationCurrent: iterGroup.iterationCurrent,
|
||||
iterationTotal: iterGroup.iterationTotal,
|
||||
iterationType: iterationType as 'loop' | 'parallel',
|
||||
}
|
||||
|
||||
// Block nodes within this iteration
|
||||
const blockNodes: EntryNode[] = iterBlocks.map((block) => ({
|
||||
entry: block,
|
||||
children: [],
|
||||
nodeType: 'block' as const,
|
||||
}))
|
||||
|
||||
return {
|
||||
entry: syntheticIteration,
|
||||
children: blockNodes,
|
||||
nodeType: 'iteration' as const,
|
||||
iterationInfo: {
|
||||
current: iterGroup.iterationCurrent,
|
||||
total: iterGroup.iterationTotal,
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
subflowNodes.push({
|
||||
entry: syntheticSubflow,
|
||||
children: iterationNodes,
|
||||
nodeType: 'subflow' as const,
|
||||
})
|
||||
}
|
||||
|
||||
// Build nodes for regular blocks
|
||||
const regularNodes: EntryNode[] = regularBlocks.map((entry) => ({
|
||||
entry,
|
||||
children: [],
|
||||
nodeType: 'block' as const,
|
||||
}))
|
||||
|
||||
// Combine all nodes and sort by start time ascending (oldest first, top-down)
|
||||
const allNodes = [...subflowNodes, ...regularNodes]
|
||||
allNodes.sort((a, b) => {
|
||||
const aStart = new Date(a.entry.startedAt || a.entry.timestamp).getTime()
|
||||
const bStart = new Date(b.entry.startedAt || b.entry.timestamp).getTime()
|
||||
return aStart - bStart
|
||||
})
|
||||
|
||||
return allNodes
|
||||
}
|
||||
|
||||
/**
|
||||
* Groups console entries by execution ID and builds a tree structure.
|
||||
* Pre-computes timestamps for efficient sorting.
|
||||
*/
|
||||
export function groupEntriesByExecution(entries: ConsoleEntry[]): ExecutionGroup[] {
|
||||
const groups = new Map<
|
||||
string,
|
||||
{ meta: Omit<ExecutionGroup, 'entryTree'>; entries: ConsoleEntry[] }
|
||||
>()
|
||||
|
||||
for (const entry of entries) {
|
||||
const execId = entry.executionId || entry.id
|
||||
|
||||
const entryStartTime = entry.startedAt || entry.timestamp
|
||||
const entryEndTime = entry.endedAt || entry.timestamp
|
||||
const entryStartMs = new Date(entryStartTime).getTime()
|
||||
const entryEndMs = new Date(entryEndTime).getTime()
|
||||
|
||||
let group = groups.get(execId)
|
||||
|
||||
if (!group) {
|
||||
group = {
|
||||
meta: {
|
||||
executionId: execId,
|
||||
startTime: entryStartTime,
|
||||
endTime: entryEndTime,
|
||||
startTimeMs: entryStartMs,
|
||||
endTimeMs: entryEndMs,
|
||||
duration: 0,
|
||||
status: 'success',
|
||||
entries: [],
|
||||
},
|
||||
entries: [],
|
||||
}
|
||||
groups.set(execId, group)
|
||||
} else {
|
||||
// Update timing bounds
|
||||
if (entryStartMs < group.meta.startTimeMs) {
|
||||
group.meta.startTime = entryStartTime
|
||||
group.meta.startTimeMs = entryStartMs
|
||||
}
|
||||
if (entryEndMs > group.meta.endTimeMs) {
|
||||
group.meta.endTime = entryEndTime
|
||||
group.meta.endTimeMs = entryEndMs
|
||||
}
|
||||
}
|
||||
|
||||
// Check for errors
|
||||
if (entry.error) {
|
||||
group.meta.status = 'error'
|
||||
}
|
||||
|
||||
group.entries.push(entry)
|
||||
}
|
||||
|
||||
// Build tree structure for each group
|
||||
const result: ExecutionGroup[] = []
|
||||
for (const group of groups.values()) {
|
||||
group.meta.duration = group.meta.endTimeMs - group.meta.startTimeMs
|
||||
group.meta.entries = group.entries
|
||||
result.push({
|
||||
...group.meta,
|
||||
entryTree: buildEntryTree(group.entries),
|
||||
})
|
||||
}
|
||||
|
||||
// Sort by start time descending (newest first)
|
||||
result.sort((a, b) => b.startTimeMs - a.startTimeMs)
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens entry tree into display order for keyboard navigation
|
||||
*/
|
||||
export function flattenEntryTree(nodes: EntryNode[]): ConsoleEntry[] {
|
||||
const result: ConsoleEntry[] = []
|
||||
for (const node of nodes) {
|
||||
result.push(node.entry)
|
||||
if (node.children.length > 0) {
|
||||
result.push(...flattenEntryTree(node.children))
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Block entry with parent tracking for navigation
|
||||
*/
|
||||
export interface NavigableBlockEntry {
|
||||
entry: ConsoleEntry
|
||||
executionId: string
|
||||
/** IDs of parent nodes (subflows, iterations) that contain this block */
|
||||
parentNodeIds: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Flattens entry tree to only include actual block entries (not subflows/iterations).
|
||||
* Also tracks parent node IDs for auto-expanding when navigating.
|
||||
*/
|
||||
export function flattenBlockEntriesOnly(
|
||||
nodes: EntryNode[],
|
||||
executionId: string,
|
||||
parentIds: string[] = []
|
||||
): NavigableBlockEntry[] {
|
||||
const result: NavigableBlockEntry[] = []
|
||||
for (const node of nodes) {
|
||||
if (node.nodeType === 'block') {
|
||||
result.push({
|
||||
entry: node.entry,
|
||||
executionId,
|
||||
parentNodeIds: parentIds,
|
||||
})
|
||||
}
|
||||
if (node.children.length > 0) {
|
||||
const newParentIds = node.nodeType !== 'block' ? [...parentIds, node.entry.id] : parentIds
|
||||
result.push(...flattenBlockEntriesOnly(node.children, executionId, newParentIds))
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Terminal height configuration constants
|
||||
*/
|
||||
export const TERMINAL_CONFIG = {
|
||||
NEAR_MIN_THRESHOLD: 40,
|
||||
BLOCK_COLUMN_WIDTH_PX: TERMINAL_BLOCK_COLUMN_WIDTH,
|
||||
HEADER_TEXT_CLASS: 'font-medium text-[var(--text-tertiary)] text-[12px]',
|
||||
} as const
|
||||
@@ -15,13 +15,16 @@ import {
|
||||
TriggerUtils,
|
||||
} from '@/lib/workflows/triggers/triggers'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
|
||||
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
import { coerceValue } from '@/executor/utils/start-block'
|
||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||
import { useExecutionStream } from '@/hooks/use-execution-stream'
|
||||
import { WorkflowValidationError } from '@/serializer'
|
||||
import { useExecutionStore } from '@/stores/execution'
|
||||
import { useNotificationStore } from '@/stores/notifications'
|
||||
import { useVariablesStore } from '@/stores/panel'
|
||||
import { useEnvironmentStore } from '@/stores/settings/environment'
|
||||
import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
|
||||
@@ -81,8 +84,7 @@ export function useWorkflowExecution() {
|
||||
const queryClient = useQueryClient()
|
||||
const currentWorkflow = useCurrentWorkflow()
|
||||
const { activeWorkflowId, workflows } = useWorkflowRegistry()
|
||||
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
|
||||
useTerminalConsoleStore()
|
||||
const { toggleConsole, addConsole } = useTerminalConsoleStore()
|
||||
const { getAllVariables } = useEnvironmentStore()
|
||||
const { getVariablesByWorkflowId, variables } = useVariablesStore()
|
||||
const {
|
||||
@@ -99,11 +101,15 @@ export function useWorkflowExecution() {
|
||||
setActiveBlocks,
|
||||
setBlockRunStatus,
|
||||
setEdgeRunStatus,
|
||||
setLastExecutionSnapshot,
|
||||
getLastExecutionSnapshot,
|
||||
clearLastExecutionSnapshot,
|
||||
} = useExecutionStore()
|
||||
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
|
||||
const executionStream = useExecutionStream()
|
||||
const currentChatExecutionIdRef = useRef<string | null>(null)
|
||||
const isViewingDiff = useWorkflowDiffStore((state) => state.isShowingDiff)
|
||||
const addNotification = useNotificationStore((state) => state.addNotification)
|
||||
|
||||
/**
|
||||
* Validates debug state before performing debug operations
|
||||
@@ -669,7 +675,8 @@ export function useWorkflowExecution() {
|
||||
onStream?: (se: StreamingExecution) => Promise<void>,
|
||||
executionId?: string,
|
||||
onBlockComplete?: (blockId: string, output: any) => Promise<void>,
|
||||
overrideTriggerType?: 'chat' | 'manual' | 'api'
|
||||
overrideTriggerType?: 'chat' | 'manual' | 'api',
|
||||
stopAfterBlockId?: string
|
||||
): Promise<ExecutionResult | StreamingExecution> => {
|
||||
// Use diff workflow for execution when available, regardless of canvas view state
|
||||
const executionWorkflowState = null as {
|
||||
@@ -868,8 +875,6 @@ export function useWorkflowExecution() {
|
||||
if (activeWorkflowId) {
|
||||
logger.info('Using server-side executor')
|
||||
|
||||
const executionId = uuidv4()
|
||||
|
||||
let executionResult: ExecutionResult = {
|
||||
success: false,
|
||||
output: {},
|
||||
@@ -879,6 +884,8 @@ export function useWorkflowExecution() {
|
||||
const activeBlocksSet = new Set<string>()
|
||||
const streamedContent = new Map<string, string>()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
|
||||
// Execute the workflow
|
||||
try {
|
||||
@@ -890,6 +897,7 @@ export function useWorkflowExecution() {
|
||||
triggerType: overrideTriggerType || 'manual',
|
||||
useDraftState: true,
|
||||
isClientSession: true,
|
||||
stopAfterBlockId,
|
||||
workflowStateOverride: executionWorkflowState
|
||||
? {
|
||||
blocks: executionWorkflowState.blocks,
|
||||
@@ -913,45 +921,28 @@ export function useWorkflowExecution() {
|
||||
incomingEdges.forEach((edge) => {
|
||||
setEdgeRunStatus(edge.id, 'success')
|
||||
})
|
||||
|
||||
// Add entry to terminal immediately with isRunning=true
|
||||
const startedAt = new Date().toISOString()
|
||||
addConsole({
|
||||
input: {},
|
||||
output: undefined,
|
||||
success: undefined,
|
||||
durationMs: undefined,
|
||||
startedAt,
|
||||
endedAt: undefined,
|
||||
workflowId: activeWorkflowId,
|
||||
blockId: data.blockId,
|
||||
executionId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
isRunning: true,
|
||||
// Pass through iteration context for subflow grouping
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
})
|
||||
},
|
||||
|
||||
onBlockCompleted: (data) => {
|
||||
logger.info('onBlockCompleted received:', { data })
|
||||
|
||||
activeBlocksSet.delete(data.blockId)
|
||||
// Create a new Set to trigger React re-render
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
// Track successful block execution in run path
|
||||
setBlockRunStatus(data.blockId, 'success')
|
||||
|
||||
// Edges already tracked in onBlockStarted, no need to track again
|
||||
executedBlockIds.add(data.blockId)
|
||||
accumulatedBlockStates.set(data.blockId, {
|
||||
output: data.output,
|
||||
executed: true,
|
||||
executionTime: data.durationMs,
|
||||
})
|
||||
|
||||
const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
|
||||
if (isContainerBlock) return
|
||||
|
||||
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
|
||||
const endedAt = new Date().toISOString()
|
||||
|
||||
// Accumulate block log for the execution result
|
||||
accumulatedBlockLogs.push({
|
||||
blockId: data.blockId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
@@ -964,23 +955,24 @@ export function useWorkflowExecution() {
|
||||
endedAt,
|
||||
})
|
||||
|
||||
// Update existing console entry (created in onBlockStarted) with completion data
|
||||
updateConsole(
|
||||
data.blockId,
|
||||
{
|
||||
input: data.input || {},
|
||||
replaceOutput: data.output,
|
||||
success: true,
|
||||
durationMs: data.durationMs,
|
||||
endedAt,
|
||||
isRunning: false,
|
||||
// Pass through iteration context for subflow grouping
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
},
|
||||
executionId
|
||||
)
|
||||
// Add to console
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
output: data.output,
|
||||
success: true,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
workflowId: activeWorkflowId,
|
||||
blockId: data.blockId,
|
||||
executionId: executionId || uuidv4(),
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
// Pass through iteration context for console pills
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
})
|
||||
|
||||
// Call onBlockComplete callback if provided
|
||||
if (onBlockComplete) {
|
||||
@@ -1015,24 +1007,25 @@ export function useWorkflowExecution() {
|
||||
endedAt,
|
||||
})
|
||||
|
||||
// Update existing console entry (created in onBlockStarted) with error data
|
||||
updateConsole(
|
||||
data.blockId,
|
||||
{
|
||||
input: data.input || {},
|
||||
replaceOutput: {},
|
||||
success: false,
|
||||
error: data.error,
|
||||
durationMs: data.durationMs,
|
||||
endedAt,
|
||||
isRunning: false,
|
||||
// Pass through iteration context for subflow grouping
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
},
|
||||
executionId
|
||||
)
|
||||
// Add error to console
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
output: {},
|
||||
success: false,
|
||||
error: data.error,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
workflowId: activeWorkflowId,
|
||||
blockId: data.blockId,
|
||||
executionId: executionId || uuidv4(),
|
||||
blockName: data.blockName,
|
||||
blockType: data.blockType,
|
||||
// Pass through iteration context for console pills
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
})
|
||||
},
|
||||
|
||||
onStreamChunk: (data) => {
|
||||
@@ -1078,6 +1071,53 @@ export function useWorkflowExecution() {
|
||||
},
|
||||
logs: accumulatedBlockLogs,
|
||||
}
|
||||
|
||||
// Add trigger block to executed blocks so downstream blocks can use run-from-block
|
||||
if (data.success && startBlockId) {
|
||||
executedBlockIds.add(startBlockId)
|
||||
}
|
||||
|
||||
if (data.success && activeWorkflowId) {
|
||||
if (stopAfterBlockId) {
|
||||
const existingSnapshot = getLastExecutionSnapshot(activeWorkflowId)
|
||||
const mergedBlockStates = {
|
||||
...(existingSnapshot?.blockStates || {}),
|
||||
...Object.fromEntries(accumulatedBlockStates),
|
||||
}
|
||||
const mergedExecutedBlocks = new Set([
|
||||
...(existingSnapshot?.executedBlocks || []),
|
||||
...executedBlockIds,
|
||||
])
|
||||
const snapshot: SerializableExecutionState = {
|
||||
blockStates: mergedBlockStates,
|
||||
executedBlocks: Array.from(mergedExecutedBlocks),
|
||||
blockLogs: [...(existingSnapshot?.blockLogs || []), ...accumulatedBlockLogs],
|
||||
decisions: existingSnapshot?.decisions || { router: {}, condition: {} },
|
||||
completedLoops: existingSnapshot?.completedLoops || [],
|
||||
activeExecutionPath: Array.from(mergedExecutedBlocks),
|
||||
}
|
||||
setLastExecutionSnapshot(activeWorkflowId, snapshot)
|
||||
logger.info('Merged execution snapshot after run-until-block', {
|
||||
workflowId: activeWorkflowId,
|
||||
newBlocksExecuted: executedBlockIds.size,
|
||||
totalExecutedBlocks: mergedExecutedBlocks.size,
|
||||
})
|
||||
} else {
|
||||
const snapshot: SerializableExecutionState = {
|
||||
blockStates: Object.fromEntries(accumulatedBlockStates),
|
||||
executedBlocks: Array.from(executedBlockIds),
|
||||
blockLogs: accumulatedBlockLogs,
|
||||
decisions: { router: {}, condition: {} },
|
||||
completedLoops: [],
|
||||
activeExecutionPath: Array.from(executedBlockIds),
|
||||
}
|
||||
setLastExecutionSnapshot(activeWorkflowId, snapshot)
|
||||
logger.info('Stored execution snapshot for run-from-block', {
|
||||
workflowId: activeWorkflowId,
|
||||
executedBlocksCount: executedBlockIds.size,
|
||||
})
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
onExecutionError: (data) => {
|
||||
@@ -1111,7 +1151,7 @@ export function useWorkflowExecution() {
|
||||
endedAt: new Date().toISOString(),
|
||||
workflowId: activeWorkflowId,
|
||||
blockId: 'validation',
|
||||
executionId,
|
||||
executionId: executionId || uuidv4(),
|
||||
blockName: 'Workflow Validation',
|
||||
blockType: 'validation',
|
||||
})
|
||||
@@ -1380,11 +1420,6 @@ export function useWorkflowExecution() {
|
||||
// Mark current chat execution as superseded so its cleanup won't affect new executions
|
||||
currentChatExecutionIdRef.current = null
|
||||
|
||||
// Mark all running entries as canceled in the terminal
|
||||
if (activeWorkflowId) {
|
||||
cancelRunningEntries(activeWorkflowId)
|
||||
}
|
||||
|
||||
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
|
||||
setIsExecuting(false)
|
||||
setIsDebugging(false)
|
||||
@@ -1401,10 +1436,332 @@ export function useWorkflowExecution() {
|
||||
setIsExecuting,
|
||||
setIsDebugging,
|
||||
setActiveBlocks,
|
||||
activeWorkflowId,
|
||||
cancelRunningEntries,
|
||||
])
|
||||
|
||||
/**
|
||||
* Handles running workflow from a specific block using cached outputs
|
||||
*/
|
||||
const handleRunFromBlock = useCallback(
|
||||
async (blockId: string, workflowId: string) => {
|
||||
const snapshot = getLastExecutionSnapshot(workflowId)
|
||||
const workflowEdges = useWorkflowStore.getState().edges
|
||||
const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
|
||||
const isTriggerBlock = incomingEdges.length === 0
|
||||
|
||||
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
|
||||
const isSourceSatisfied = (sourceId: string) => {
|
||||
if (snapshot?.executedBlocks.includes(sourceId)) return true
|
||||
// Check if source is a trigger (has no incoming edges itself)
|
||||
const sourceIncomingEdges = workflowEdges.filter((edge) => edge.target === sourceId)
|
||||
return sourceIncomingEdges.length === 0
|
||||
}
|
||||
|
||||
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
|
||||
if (!snapshot && !isTriggerBlock) {
|
||||
logger.error('No execution snapshot available for run-from-block', { workflowId, blockId })
|
||||
return
|
||||
}
|
||||
|
||||
const dependenciesSatisfied =
|
||||
isTriggerBlock || incomingEdges.every((edge) => isSourceSatisfied(edge.source))
|
||||
|
||||
if (!dependenciesSatisfied) {
|
||||
logger.error('Upstream dependencies not satisfied for run-from-block', {
|
||||
workflowId,
|
||||
blockId,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// For trigger blocks, always use empty snapshot to prevent stale data from different
|
||||
// execution paths from being resolved. For non-trigger blocks, use the existing snapshot.
|
||||
const emptySnapshot: SerializableExecutionState = {
|
||||
blockStates: {},
|
||||
executedBlocks: [],
|
||||
blockLogs: [],
|
||||
decisions: { router: {}, condition: {} },
|
||||
completedLoops: [],
|
||||
activeExecutionPath: [],
|
||||
}
|
||||
const effectiveSnapshot: SerializableExecutionState = isTriggerBlock
|
||||
? emptySnapshot
|
||||
: snapshot || emptySnapshot
|
||||
|
||||
// Extract mock payload for trigger blocks
|
||||
let workflowInput: any
|
||||
if (isTriggerBlock) {
|
||||
const workflowBlocks = useWorkflowStore.getState().blocks
|
||||
const mergedStates = mergeSubblockState(workflowBlocks, workflowId)
|
||||
const candidates = resolveStartCandidates(mergedStates, { execution: 'manual' })
|
||||
const candidate = candidates.find((c) => c.blockId === blockId)
|
||||
|
||||
if (candidate) {
|
||||
if (triggerNeedsMockPayload(candidate)) {
|
||||
workflowInput = extractTriggerMockPayload(candidate)
|
||||
} else if (
|
||||
candidate.path === StartBlockPath.SPLIT_API ||
|
||||
candidate.path === StartBlockPath.SPLIT_INPUT ||
|
||||
candidate.path === StartBlockPath.UNIFIED
|
||||
) {
|
||||
const inputFormatValue = candidate.block.subBlocks?.inputFormat?.value
|
||||
if (Array.isArray(inputFormatValue)) {
|
||||
const testInput: Record<string, any> = {}
|
||||
inputFormatValue.forEach((field: any) => {
|
||||
if (field && typeof field === 'object' && field.name && field.value !== undefined) {
|
||||
testInput[field.name] = coerceValue(field.type, field.value)
|
||||
}
|
||||
})
|
||||
if (Object.keys(testInput).length > 0) {
|
||||
workflowInput = testInput
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Fallback: block is trigger by position but not classified as start candidate
|
||||
const block = mergedStates[blockId]
|
||||
if (block) {
|
||||
const blockConfig = getBlock(block.type)
|
||||
const hasTriggers = blockConfig?.triggers?.available?.length
|
||||
|
||||
if (hasTriggers || block.triggerMode) {
|
||||
workflowInput = extractTriggerMockPayload({
|
||||
blockId,
|
||||
block,
|
||||
path: StartBlockPath.EXTERNAL_TRIGGER,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setIsExecuting(true)
|
||||
const executionId = uuidv4()
|
||||
const accumulatedBlockLogs: BlockLog[] = []
|
||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||
const executedBlockIds = new Set<string>()
|
||||
const activeBlocksSet = new Set<string>()
|
||||
|
||||
try {
|
||||
await executionStream.executeFromBlock({
|
||||
workflowId,
|
||||
startBlockId: blockId,
|
||||
sourceSnapshot: effectiveSnapshot,
|
||||
input: workflowInput,
|
||||
callbacks: {
|
||||
onBlockStarted: (data) => {
|
||||
activeBlocksSet.add(data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
|
||||
incomingEdges.forEach((edge) => {
|
||||
setEdgeRunStatus(edge.id, 'success')
|
||||
})
|
||||
},
|
||||
|
||||
onBlockCompleted: (data) => {
|
||||
activeBlocksSet.delete(data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(data.blockId, 'success')
|
||||
|
||||
executedBlockIds.add(data.blockId)
|
||||
accumulatedBlockStates.set(data.blockId, {
|
||||
output: data.output,
|
||||
executed: true,
|
||||
executionTime: data.durationMs,
|
||||
})
|
||||
|
||||
const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
|
||||
if (isContainerBlock) return
|
||||
|
||||
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
|
||||
const endedAt = new Date().toISOString()
|
||||
|
||||
accumulatedBlockLogs.push({
|
||||
blockId: data.blockId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
input: data.input || {},
|
||||
output: data.output,
|
||||
success: true,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
})
|
||||
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
output: data.output,
|
||||
success: true,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
workflowId,
|
||||
blockId: data.blockId,
|
||||
executionId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
})
|
||||
},
|
||||
|
||||
onBlockError: (data) => {
|
||||
activeBlocksSet.delete(data.blockId)
|
||||
setActiveBlocks(new Set(activeBlocksSet))
|
||||
|
||||
setBlockRunStatus(data.blockId, 'error')
|
||||
|
||||
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
|
||||
const endedAt = new Date().toISOString()
|
||||
|
||||
accumulatedBlockLogs.push({
|
||||
blockId: data.blockId,
|
||||
blockName: data.blockName || 'Unknown Block',
|
||||
blockType: data.blockType || 'unknown',
|
||||
input: data.input || {},
|
||||
output: {},
|
||||
success: false,
|
||||
error: data.error,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
})
|
||||
|
||||
addConsole({
|
||||
input: data.input || {},
|
||||
output: {},
|
||||
success: false,
|
||||
error: data.error,
|
||||
durationMs: data.durationMs,
|
||||
startedAt,
|
||||
endedAt,
|
||||
workflowId,
|
||||
blockId: data.blockId,
|
||||
executionId,
|
||||
blockName: data.blockName,
|
||||
blockType: data.blockType,
|
||||
iterationCurrent: data.iterationCurrent,
|
||||
iterationTotal: data.iterationTotal,
|
||||
iterationType: data.iterationType,
|
||||
})
|
||||
},
|
||||
|
||||
onExecutionCompleted: (data) => {
|
||||
if (data.success) {
|
||||
// Add the start block (trigger) to executed blocks
|
||||
executedBlockIds.add(blockId)
|
||||
|
||||
const mergedBlockStates: Record<string, BlockState> = {
|
||||
...effectiveSnapshot.blockStates,
|
||||
}
|
||||
for (const [bId, state] of accumulatedBlockStates) {
|
||||
mergedBlockStates[bId] = state
|
||||
}
|
||||
|
||||
const mergedExecutedBlocks = new Set([
|
||||
...effectiveSnapshot.executedBlocks,
|
||||
...executedBlockIds,
|
||||
])
|
||||
|
||||
const updatedSnapshot: SerializableExecutionState = {
|
||||
...effectiveSnapshot,
|
||||
blockStates: mergedBlockStates,
|
||||
executedBlocks: Array.from(mergedExecutedBlocks),
|
||||
blockLogs: [...effectiveSnapshot.blockLogs, ...accumulatedBlockLogs],
|
||||
activeExecutionPath: Array.from(mergedExecutedBlocks),
|
||||
}
|
||||
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
||||
}
|
||||
},
|
||||
|
||||
onExecutionError: (data) => {
|
||||
const isWorkflowModified =
|
||||
data.error?.includes('Block not found in workflow') ||
|
||||
data.error?.includes('Upstream dependency not executed')
|
||||
|
||||
if (isWorkflowModified) {
|
||||
clearLastExecutionSnapshot(workflowId)
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message:
|
||||
'Workflow was modified. Run the workflow again to enable running from block.',
|
||||
workflowId,
|
||||
})
|
||||
} else {
|
||||
addNotification({
|
||||
level: 'error',
|
||||
message: data.error || 'Run from block failed',
|
||||
workflowId,
|
||||
})
|
||||
}
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if ((error as Error).name !== 'AbortError') {
|
||||
logger.error('Run-from-block failed:', error)
|
||||
}
|
||||
} finally {
|
||||
setIsExecuting(false)
|
||||
setActiveBlocks(new Set())
|
||||
}
|
||||
},
|
||||
[
|
||||
getLastExecutionSnapshot,
|
||||
setLastExecutionSnapshot,
|
||||
clearLastExecutionSnapshot,
|
||||
setIsExecuting,
|
||||
setActiveBlocks,
|
||||
setBlockRunStatus,
|
||||
setEdgeRunStatus,
|
||||
addNotification,
|
||||
addConsole,
|
||||
executionStream,
|
||||
]
|
||||
)
|
||||
|
||||
/**
|
||||
* Handles running workflow until a specific block (stops after that block completes)
|
||||
*/
|
||||
const handleRunUntilBlock = useCallback(
|
||||
async (blockId: string, workflowId: string) => {
|
||||
if (!workflowId || workflowId !== activeWorkflowId) {
|
||||
logger.error('Invalid workflow ID for run-until-block', { workflowId, activeWorkflowId })
|
||||
return
|
||||
}
|
||||
|
||||
logger.info('Starting run-until-block execution', { workflowId, stopAfterBlockId: blockId })
|
||||
|
||||
setExecutionResult(null)
|
||||
setIsExecuting(true)
|
||||
|
||||
const executionId = uuidv4()
|
||||
try {
|
||||
const result = await executeWorkflow(
|
||||
undefined,
|
||||
undefined,
|
||||
executionId,
|
||||
undefined,
|
||||
'manual',
|
||||
blockId
|
||||
)
|
||||
if (result && 'success' in result) {
|
||||
setExecutionResult(result)
|
||||
}
|
||||
} catch (error) {
|
||||
const errorResult = handleExecutionError(error, { executionId })
|
||||
return errorResult
|
||||
} finally {
|
||||
setIsExecuting(false)
|
||||
setIsDebugging(false)
|
||||
setActiveBlocks(new Set())
|
||||
}
|
||||
},
|
||||
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
|
||||
)
|
||||
|
||||
return {
|
||||
isExecuting,
|
||||
isDebugging,
|
||||
@@ -1415,5 +1772,7 @@ export function useWorkflowExecution() {
|
||||
handleResumeDebug,
|
||||
handleCancelDebug,
|
||||
handleCancelExecution,
|
||||
handleRunFromBlock,
|
||||
handleRunUntilBlock,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +47,7 @@ import {
|
||||
useCurrentWorkflow,
|
||||
useNodeUtilities,
|
||||
useShiftSelectionLock,
|
||||
useWorkflowExecution,
|
||||
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import {
|
||||
calculateContainerDimensions,
|
||||
@@ -302,6 +303,8 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
const showTrainingModal = useCopilotTrainingStore((state) => state.showModal)
|
||||
|
||||
const { handleRunFromBlock, handleRunUntilBlock } = useWorkflowExecution()
|
||||
|
||||
const snapToGridSize = useSnapToGridSize()
|
||||
const snapToGrid = snapToGridSize > 0
|
||||
|
||||
@@ -733,13 +736,16 @@ const WorkflowContent = React.memo(() => {
|
||||
[collaborativeBatchAddBlocks, setSelectedEdges, setPendingSelection]
|
||||
)
|
||||
|
||||
const { activeBlockIds, pendingBlocks, isDebugging } = useExecutionStore(
|
||||
useShallow((state) => ({
|
||||
activeBlockIds: state.activeBlockIds,
|
||||
pendingBlocks: state.pendingBlocks,
|
||||
isDebugging: state.isDebugging,
|
||||
}))
|
||||
)
|
||||
const { activeBlockIds, pendingBlocks, isDebugging, isExecuting, getLastExecutionSnapshot } =
|
||||
useExecutionStore(
|
||||
useShallow((state) => ({
|
||||
activeBlockIds: state.activeBlockIds,
|
||||
pendingBlocks: state.pendingBlocks,
|
||||
isDebugging: state.isDebugging,
|
||||
isExecuting: state.isExecuting,
|
||||
getLastExecutionSnapshot: state.getLastExecutionSnapshot,
|
||||
}))
|
||||
)
|
||||
|
||||
const [dragStartParentId, setDragStartParentId] = useState<string | null>(null)
|
||||
|
||||
@@ -1102,6 +1108,50 @@ const WorkflowContent = React.memo(() => {
|
||||
}
|
||||
}, [contextMenuBlocks])
|
||||
|
||||
const handleContextRunFromBlock = useCallback(() => {
|
||||
if (contextMenuBlocks.length !== 1) return
|
||||
const blockId = contextMenuBlocks[0].id
|
||||
handleRunFromBlock(blockId, workflowIdParam)
|
||||
}, [contextMenuBlocks, workflowIdParam, handleRunFromBlock])
|
||||
|
||||
const handleContextRunUntilBlock = useCallback(() => {
|
||||
if (contextMenuBlocks.length !== 1) return
|
||||
const blockId = contextMenuBlocks[0].id
|
||||
handleRunUntilBlock(blockId, workflowIdParam)
|
||||
}, [contextMenuBlocks, workflowIdParam, handleRunUntilBlock])
|
||||
|
||||
const runFromBlockState = useMemo(() => {
|
||||
if (contextMenuBlocks.length !== 1) {
|
||||
return { canRun: false, reason: undefined }
|
||||
}
|
||||
const block = contextMenuBlocks[0]
|
||||
const snapshot = getLastExecutionSnapshot(workflowIdParam)
|
||||
const incomingEdges = edges.filter((edge) => edge.target === block.id)
|
||||
const isTriggerBlock = incomingEdges.length === 0
|
||||
|
||||
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
|
||||
const isSourceSatisfied = (sourceId: string) => {
|
||||
if (snapshot?.executedBlocks.includes(sourceId)) return true
|
||||
// Check if source is a trigger (has no incoming edges itself)
|
||||
const sourceIncomingEdges = edges.filter((edge) => edge.target === sourceId)
|
||||
return sourceIncomingEdges.length === 0
|
||||
}
|
||||
|
||||
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
|
||||
const dependenciesSatisfied =
|
||||
isTriggerBlock || (snapshot && incomingEdges.every((edge) => isSourceSatisfied(edge.source)))
|
||||
const isNoteBlock = block.type === 'note'
|
||||
const isInsideSubflow =
|
||||
block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
|
||||
|
||||
if (isInsideSubflow) return { canRun: false, reason: 'Cannot run from inside subflow' }
|
||||
if (!dependenciesSatisfied) return { canRun: false, reason: 'Run upstream blocks first' }
|
||||
if (isNoteBlock) return { canRun: false, reason: undefined }
|
||||
if (isExecuting) return { canRun: false, reason: undefined }
|
||||
|
||||
return { canRun: true, reason: undefined }
|
||||
}, [contextMenuBlocks, edges, workflowIdParam, getLastExecutionSnapshot, isExecuting])
|
||||
|
||||
const handleContextAddBlock = useCallback(() => {
|
||||
useSearchModalStore.getState().open()
|
||||
}, [])
|
||||
@@ -2302,33 +2352,12 @@ const WorkflowContent = React.memo(() => {
|
||||
window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
|
||||
}, [blocks, edgesForDisplay, getNodeAbsolutePosition, collaborativeBatchUpdateParent])
|
||||
|
||||
/** Handles node changes - applies changes and resolves parent-child selection conflicts. */
|
||||
const onNodesChange = useCallback(
|
||||
(changes: NodeChange[]) => {
|
||||
selectedIdsRef.current = null
|
||||
setDisplayNodes((nds) => {
|
||||
const updated = applyNodeChanges(changes, nds)
|
||||
const hasSelectionChange = changes.some((c) => c.type === 'select')
|
||||
if (!hasSelectionChange) return updated
|
||||
const resolved = resolveParentChildSelectionConflicts(updated, blocks)
|
||||
selectedIdsRef.current = resolved.filter((node) => node.selected).map((node) => node.id)
|
||||
return resolved
|
||||
})
|
||||
const selectedIds = selectedIdsRef.current as string[] | null
|
||||
if (selectedIds !== null) {
|
||||
syncPanelWithSelection(selectedIds)
|
||||
}
|
||||
},
|
||||
[blocks]
|
||||
)
|
||||
|
||||
/**
|
||||
* Updates container dimensions in displayNodes during drag.
|
||||
* This allows live resizing of containers as their children are dragged.
|
||||
* Updates container dimensions in displayNodes during drag or keyboard movement.
|
||||
*/
|
||||
const updateContainerDimensionsDuringDrag = useCallback(
|
||||
(draggedNodeId: string, draggedNodePosition: { x: number; y: number }) => {
|
||||
const parentId = blocks[draggedNodeId]?.data?.parentId
|
||||
const updateContainerDimensionsDuringMove = useCallback(
|
||||
(movedNodeId: string, movedNodePosition: { x: number; y: number }) => {
|
||||
const parentId = blocks[movedNodeId]?.data?.parentId
|
||||
if (!parentId) return
|
||||
|
||||
setDisplayNodes((currentNodes) => {
|
||||
@@ -2336,7 +2365,7 @@ const WorkflowContent = React.memo(() => {
|
||||
if (childNodes.length === 0) return currentNodes
|
||||
|
||||
const childPositions = childNodes.map((node) => {
|
||||
const nodePosition = node.id === draggedNodeId ? draggedNodePosition : node.position
|
||||
const nodePosition = node.id === movedNodeId ? movedNodePosition : node.position
|
||||
const { width, height } = getBlockDimensions(node.id)
|
||||
return { x: nodePosition.x, y: nodePosition.y, width, height }
|
||||
})
|
||||
@@ -2367,6 +2396,55 @@ const WorkflowContent = React.memo(() => {
|
||||
[blocks, getBlockDimensions]
|
||||
)
|
||||
|
||||
/** Handles node changes - applies changes and resolves parent-child selection conflicts. */
|
||||
const onNodesChange = useCallback(
|
||||
(changes: NodeChange[]) => {
|
||||
selectedIdsRef.current = null
|
||||
setDisplayNodes((nds) => {
|
||||
const updated = applyNodeChanges(changes, nds)
|
||||
const hasSelectionChange = changes.some((c) => c.type === 'select')
|
||||
if (!hasSelectionChange) return updated
|
||||
const resolved = resolveParentChildSelectionConflicts(updated, blocks)
|
||||
selectedIdsRef.current = resolved.filter((node) => node.selected).map((node) => node.id)
|
||||
return resolved
|
||||
})
|
||||
const selectedIds = selectedIdsRef.current as string[] | null
|
||||
if (selectedIds !== null) {
|
||||
syncPanelWithSelection(selectedIds)
|
||||
}
|
||||
|
||||
// Handle position changes (e.g., from keyboard arrow key movement)
|
||||
// Update container dimensions when child nodes are moved and persist to backend
|
||||
// Only persist if not in a drag operation (drag-end is handled by onNodeDragStop)
|
||||
const isInDragOperation =
|
||||
getDragStartPosition() !== null || multiNodeDragStartRef.current.size > 0
|
||||
const keyboardPositionUpdates: Array<{ id: string; position: { x: number; y: number } }> = []
|
||||
for (const change of changes) {
|
||||
if (
|
||||
change.type === 'position' &&
|
||||
!change.dragging &&
|
||||
'position' in change &&
|
||||
change.position
|
||||
) {
|
||||
updateContainerDimensionsDuringMove(change.id, change.position)
|
||||
if (!isInDragOperation) {
|
||||
keyboardPositionUpdates.push({ id: change.id, position: change.position })
|
||||
}
|
||||
}
|
||||
}
|
||||
// Persist keyboard movements to backend for collaboration sync
|
||||
if (keyboardPositionUpdates.length > 0) {
|
||||
collaborativeBatchUpdatePositions(keyboardPositionUpdates)
|
||||
}
|
||||
},
|
||||
[
|
||||
blocks,
|
||||
updateContainerDimensionsDuringMove,
|
||||
collaborativeBatchUpdatePositions,
|
||||
getDragStartPosition,
|
||||
]
|
||||
)
|
||||
|
||||
/**
|
||||
* Effect to resize loops when nodes change (add/remove/position change).
|
||||
* Runs on structural changes only - not during drag (position-only changes).
|
||||
@@ -2611,7 +2689,7 @@ const WorkflowContent = React.memo(() => {
|
||||
|
||||
// If the node is inside a container, update container dimensions during drag
|
||||
if (currentParentId) {
|
||||
updateContainerDimensionsDuringDrag(node.id, node.position)
|
||||
updateContainerDimensionsDuringMove(node.id, node.position)
|
||||
}
|
||||
|
||||
// Check if this is a starter block - starter blocks should never be in containers
|
||||
@@ -2728,7 +2806,7 @@ const WorkflowContent = React.memo(() => {
|
||||
blocks,
|
||||
getNodeAbsolutePosition,
|
||||
getNodeDepth,
|
||||
updateContainerDimensionsDuringDrag,
|
||||
updateContainerDimensionsDuringMove,
|
||||
highlightContainerNode,
|
||||
]
|
||||
)
|
||||
@@ -3418,11 +3496,19 @@ const WorkflowContent = React.memo(() => {
|
||||
onRemoveFromSubflow={handleContextRemoveFromSubflow}
|
||||
onOpenEditor={handleContextOpenEditor}
|
||||
onRename={handleContextRename}
|
||||
onRunFromBlock={handleContextRunFromBlock}
|
||||
onRunUntilBlock={handleContextRunUntilBlock}
|
||||
hasClipboard={hasClipboard()}
|
||||
showRemoveFromSubflow={contextMenuBlocks.some(
|
||||
(b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
|
||||
)}
|
||||
canRunFromBlock={runFromBlockState.canRun}
|
||||
disableEdit={!effectivePermissions.canEdit}
|
||||
isExecuting={isExecuting}
|
||||
isPositionalTrigger={
|
||||
contextMenuBlocks.length === 1 &&
|
||||
edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
|
||||
}
|
||||
/>
|
||||
|
||||
<CanvasMenu
|
||||
|
||||
@@ -1141,17 +1141,15 @@ function PreviewEditorContent({
|
||||
<div className='relative flex h-full w-80 flex-col overflow-hidden border-[var(--border)] border-l bg-[var(--surface-1)]'>
|
||||
{/* Header - styled like editor */}
|
||||
<div className='mx-[-1px] flex flex-shrink-0 items-center gap-[8px] rounded-b-[4px] border-[var(--border)] border-x border-b bg-[var(--surface-4)] px-[12px] py-[6px]'>
|
||||
{block.type !== 'note' && (
|
||||
<div
|
||||
className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px]'
|
||||
style={{ backgroundColor: blockConfig.bgColor }}
|
||||
>
|
||||
<IconComponent
|
||||
icon={blockConfig.icon}
|
||||
className='h-[12px] w-[12px] text-[var(--white)]'
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px]'
|
||||
style={{ backgroundColor: blockConfig.bgColor }}
|
||||
>
|
||||
<IconComponent
|
||||
icon={blockConfig.icon}
|
||||
className='h-[12px] w-[12px] text-[var(--white)]'
|
||||
/>
|
||||
</div>
|
||||
<span className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
|
||||
{block.name || blockConfig.name}
|
||||
</span>
|
||||
|
||||
@@ -411,9 +411,8 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
|
||||
|
||||
const IconComponent = blockConfig.icon
|
||||
const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger
|
||||
const isNoteBlock = type === 'note'
|
||||
|
||||
const shouldShowDefaultHandles = !isStarterOrTrigger && !isNoteBlock
|
||||
const shouldShowDefaultHandles = !isStarterOrTrigger
|
||||
const hasSubBlocks = visibleSubBlocks.length > 0
|
||||
const hasContentBelowHeader =
|
||||
type === 'condition'
|
||||
@@ -575,8 +574,8 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* Source and error handles for non-condition/router/note blocks */}
|
||||
{type !== 'condition' && type !== 'router_v2' && type !== 'response' && !isNoteBlock && (
|
||||
{/* Source and error handles for non-condition/router blocks */}
|
||||
{type !== 'condition' && type !== 'router_v2' && type !== 'response' && (
|
||||
<>
|
||||
<Handle
|
||||
type='source'
|
||||
|
||||
@@ -406,11 +406,9 @@ export function PreviewWorkflow({
|
||||
}
|
||||
}
|
||||
|
||||
const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock'
|
||||
|
||||
nodeArray.push({
|
||||
id: blockId,
|
||||
type: nodeType,
|
||||
type: 'workflowBlock',
|
||||
position: absolutePosition,
|
||||
draggable: false,
|
||||
zIndex: block.data?.parentId ? 10 : undefined,
|
||||
|
||||
@@ -2,9 +2,10 @@
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { Command } from 'cmdk'
|
||||
import { BookOpen, Layout, ScrollText } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Settings } from 'lucide-react'
|
||||
import { useParams, useRouter } from 'next/navigation'
|
||||
import { createPortal } from 'react-dom'
|
||||
import { Library } from '@/components/emcn'
|
||||
import { useBrandConfig } from '@/lib/branding/branding'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils'
|
||||
@@ -15,6 +16,7 @@ import type {
|
||||
SearchDocItem,
|
||||
SearchToolOperationItem,
|
||||
} from '@/stores/modals/search/types'
|
||||
import { useSettingsModalStore } from '@/stores/modals/settings/store'
|
||||
|
||||
interface SearchModalProps {
|
||||
open: boolean
|
||||
@@ -43,7 +45,8 @@ interface PageItem {
|
||||
id: string
|
||||
name: string
|
||||
icon: React.ComponentType<{ className?: string }>
|
||||
href: string
|
||||
href?: string
|
||||
onClick?: () => void
|
||||
shortcut?: string
|
||||
}
|
||||
|
||||
@@ -61,6 +64,7 @@ export function SearchModal({
|
||||
const inputRef = useRef<HTMLInputElement>(null)
|
||||
const [search, setSearch] = useState('')
|
||||
const [mounted, setMounted] = useState(false)
|
||||
const openSettingsModal = useSettingsModalStore((state) => state.openModal)
|
||||
|
||||
useEffect(() => {
|
||||
setMounted(true)
|
||||
@@ -70,12 +74,16 @@ export function SearchModal({
|
||||
(state) => state.data
|
||||
)
|
||||
|
||||
const openHelpModal = useCallback(() => {
|
||||
window.dispatchEvent(new CustomEvent('open-help-modal'))
|
||||
}, [])
|
||||
|
||||
const pages = useMemo(
|
||||
(): PageItem[] => [
|
||||
{
|
||||
id: 'logs',
|
||||
name: 'Logs',
|
||||
icon: ScrollText,
|
||||
icon: Library,
|
||||
href: `/workspace/${workspaceId}/logs`,
|
||||
shortcut: '⌘⇧L',
|
||||
},
|
||||
@@ -86,13 +94,26 @@ export function SearchModal({
|
||||
href: `/workspace/${workspaceId}/templates`,
|
||||
},
|
||||
{
|
||||
id: 'docs',
|
||||
name: 'Docs',
|
||||
icon: BookOpen,
|
||||
href: brand.documentationUrl || 'https://docs.sim.ai/',
|
||||
id: 'knowledge-base',
|
||||
name: 'Knowledge Base',
|
||||
icon: Database,
|
||||
href: `/workspace/${workspaceId}/knowledge`,
|
||||
},
|
||||
{
|
||||
id: 'help',
|
||||
name: 'Help',
|
||||
icon: HelpCircle,
|
||||
onClick: openHelpModal,
|
||||
},
|
||||
{
|
||||
id: 'settings',
|
||||
name: 'Settings',
|
||||
icon: Settings,
|
||||
onClick: openSettingsModal,
|
||||
shortcut: '⌘,',
|
||||
},
|
||||
],
|
||||
[workspaceId, brand.documentationUrl]
|
||||
[workspaceId, openHelpModal, openSettingsModal]
|
||||
)
|
||||
|
||||
useEffect(() => {
|
||||
@@ -179,10 +200,14 @@ export function SearchModal({
|
||||
|
||||
const handlePageSelect = useCallback(
|
||||
(page: PageItem) => {
|
||||
if (page.href.startsWith('http')) {
|
||||
window.open(page.href, '_blank', 'noopener,noreferrer')
|
||||
} else {
|
||||
router.push(page.href)
|
||||
if (page.onClick) {
|
||||
page.onClick()
|
||||
} else if (page.href) {
|
||||
if (page.href.startsWith('http')) {
|
||||
window.open(page.href, '_blank', 'noopener,noreferrer')
|
||||
} else {
|
||||
router.push(page.href)
|
||||
}
|
||||
}
|
||||
onOpenChange(false)
|
||||
},
|
||||
@@ -269,7 +294,7 @@ export function SearchModal({
|
||||
{blocks.map((block) => (
|
||||
<CommandItem
|
||||
key={block.id}
|
||||
value={block.name}
|
||||
value={`${block.name} block-${block.id}`}
|
||||
keywords={[block.description]}
|
||||
onSelect={() => handleBlockSelect(block, 'block')}
|
||||
icon={block.icon}
|
||||
@@ -287,7 +312,7 @@ export function SearchModal({
|
||||
{tools.map((tool) => (
|
||||
<CommandItem
|
||||
key={tool.id}
|
||||
value={tool.name}
|
||||
value={`${tool.name} tool-${tool.id}`}
|
||||
keywords={[tool.description]}
|
||||
onSelect={() => handleBlockSelect(tool, 'tool')}
|
||||
icon={tool.icon}
|
||||
@@ -305,7 +330,7 @@ export function SearchModal({
|
||||
{triggers.map((trigger) => (
|
||||
<CommandItem
|
||||
key={trigger.id}
|
||||
value={trigger.name}
|
||||
value={`${trigger.name} trigger-${trigger.id}`}
|
||||
keywords={[trigger.description]}
|
||||
onSelect={() => handleBlockSelect(trigger, 'trigger')}
|
||||
icon={trigger.icon}
|
||||
@@ -323,7 +348,7 @@ export function SearchModal({
|
||||
{workflows.map((workflow) => (
|
||||
<Command.Item
|
||||
key={workflow.id}
|
||||
value={workflow.name}
|
||||
value={`${workflow.name} workflow-${workflow.id}`}
|
||||
onSelect={() => handleWorkflowSelect(workflow)}
|
||||
className='group flex h-[28px] w-full cursor-pointer items-center gap-[8px] rounded-[6px] px-[10px] text-left text-[15px] aria-selected:bg-[var(--border)] aria-selected:shadow-sm data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50'
|
||||
>
|
||||
@@ -345,7 +370,7 @@ export function SearchModal({
|
||||
{toolOperations.map((op) => (
|
||||
<CommandItem
|
||||
key={op.id}
|
||||
value={op.searchValue}
|
||||
value={`${op.searchValue} operation-${op.id}`}
|
||||
keywords={op.keywords}
|
||||
onSelect={() => handleToolOperationSelect(op)}
|
||||
icon={op.icon}
|
||||
@@ -363,7 +388,7 @@ export function SearchModal({
|
||||
{workspaces.map((workspace) => (
|
||||
<Command.Item
|
||||
key={workspace.id}
|
||||
value={workspace.name}
|
||||
value={`${workspace.name} workspace-${workspace.id}`}
|
||||
onSelect={() => handleWorkspaceSelect(workspace)}
|
||||
className='group flex h-[28px] w-full cursor-pointer items-center gap-[8px] rounded-[6px] px-[10px] text-left text-[15px] aria-selected:bg-[var(--border)] aria-selected:shadow-sm data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50'
|
||||
>
|
||||
@@ -381,7 +406,7 @@ export function SearchModal({
|
||||
{docs.map((doc) => (
|
||||
<CommandItem
|
||||
key={doc.id}
|
||||
value={`${doc.name} docs documentation`}
|
||||
value={`${doc.name} docs documentation doc-${doc.id}`}
|
||||
onSelect={() => handleDocSelect(doc)}
|
||||
icon={doc.icon}
|
||||
bgColor='#6B7280'
|
||||
@@ -400,7 +425,7 @@ export function SearchModal({
|
||||
return (
|
||||
<Command.Item
|
||||
key={page.id}
|
||||
value={page.name}
|
||||
value={`${page.name} page-${page.id}`}
|
||||
onSelect={() => handlePageSelect(page)}
|
||||
className='group flex h-[28px] w-full cursor-pointer items-center gap-[8px] rounded-[6px] px-[10px] text-left text-[15px] aria-selected:bg-[var(--border)] aria-selected:shadow-sm data-[disabled=true]:pointer-events-none data-[disabled=true]:opacity-50'
|
||||
>
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
.code-editor-theme .token.char,
|
||||
.code-editor-theme .token.builtin,
|
||||
.code-editor-theme .token.inserted {
|
||||
color: #b45309 !important;
|
||||
color: #dc2626 !important;
|
||||
}
|
||||
|
||||
.code-editor-theme .token.operator,
|
||||
@@ -49,7 +49,7 @@
|
||||
.code-editor-theme .token.atrule,
|
||||
.code-editor-theme .token.attr-value,
|
||||
.code-editor-theme .token.keyword {
|
||||
color: #2f55ff !important;
|
||||
color: #2563eb !important;
|
||||
}
|
||||
|
||||
.code-editor-theme .token.function,
|
||||
@@ -119,7 +119,7 @@
|
||||
.dark .code-editor-theme .token.atrule,
|
||||
.dark .code-editor-theme .token.attr-value,
|
||||
.dark .code-editor-theme .token.keyword {
|
||||
color: #2fa1ff !important;
|
||||
color: #4db8ff !important;
|
||||
}
|
||||
|
||||
.dark .code-editor-theme .token.function,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -33,6 +33,15 @@ export interface DAG {
|
||||
parallelConfigs: Map<string, SerializedParallel>
|
||||
}
|
||||
|
||||
export interface DAGBuildOptions {
|
||||
/** Trigger block ID to start path construction from */
|
||||
triggerBlockId?: string
|
||||
/** Saved incoming edges from snapshot for resumption */
|
||||
savedIncomingEdges?: Record<string, string[]>
|
||||
/** Include all enabled blocks instead of only those reachable from trigger */
|
||||
includeAllBlocks?: boolean
|
||||
}
|
||||
|
||||
export class DAGBuilder {
|
||||
private pathConstructor = new PathConstructor()
|
||||
private loopConstructor = new LoopConstructor()
|
||||
@@ -40,11 +49,9 @@ export class DAGBuilder {
|
||||
private nodeConstructor = new NodeConstructor()
|
||||
private edgeConstructor = new EdgeConstructor()
|
||||
|
||||
build(
|
||||
workflow: SerializedWorkflow,
|
||||
triggerBlockId?: string,
|
||||
savedIncomingEdges?: Record<string, string[]>
|
||||
): DAG {
|
||||
build(workflow: SerializedWorkflow, options: DAGBuildOptions = {}): DAG {
|
||||
const { triggerBlockId, savedIncomingEdges, includeAllBlocks } = options
|
||||
|
||||
const dag: DAG = {
|
||||
nodes: new Map(),
|
||||
loopConfigs: new Map(),
|
||||
@@ -53,7 +60,7 @@ export class DAGBuilder {
|
||||
|
||||
this.initializeConfigs(workflow, dag)
|
||||
|
||||
const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId)
|
||||
const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId, includeAllBlocks)
|
||||
|
||||
this.loopConstructor.execute(dag, reachableBlocks)
|
||||
this.parallelConstructor.execute(dag, reachableBlocks)
|
||||
|
||||
@@ -207,6 +207,7 @@ export class EdgeConstructor {
|
||||
for (const connection of workflow.connections) {
|
||||
let { source, target } = connection
|
||||
const originalSource = source
|
||||
const originalTarget = target
|
||||
let sourceHandle = this.generateSourceHandle(
|
||||
source,
|
||||
target,
|
||||
@@ -257,14 +258,14 @@ export class EdgeConstructor {
|
||||
target = sentinelStartId
|
||||
}
|
||||
|
||||
if (loopSentinelStartId) {
|
||||
this.addEdge(dag, loopSentinelStartId, target, EDGE.LOOP_EXIT, targetHandle)
|
||||
}
|
||||
|
||||
if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (loopSentinelStartId && !blocksInLoops.has(originalTarget)) {
|
||||
this.addEdge(dag, loopSentinelStartId, target, EDGE.LOOP_EXIT, targetHandle)
|
||||
}
|
||||
|
||||
if (!this.isEdgeReachable(source, target, reachableBlocks, dag)) {
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -6,7 +6,16 @@ import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
const logger = createLogger('PathConstructor')
|
||||
|
||||
export class PathConstructor {
|
||||
execute(workflow: SerializedWorkflow, triggerBlockId?: string): Set<string> {
|
||||
execute(
|
||||
workflow: SerializedWorkflow,
|
||||
triggerBlockId?: string,
|
||||
includeAllBlocks?: boolean
|
||||
): Set<string> {
|
||||
// For run-from-block mode, include all enabled blocks regardless of trigger reachability
|
||||
if (includeAllBlocks) {
|
||||
return this.getAllEnabledBlocks(workflow)
|
||||
}
|
||||
|
||||
const resolvedTriggerId = this.findTriggerBlock(workflow, triggerBlockId)
|
||||
|
||||
if (!resolvedTriggerId) {
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
containsUserFileWithMetadata,
|
||||
hydrateUserFilesWithBase64,
|
||||
} from '@/lib/uploads/utils/user-file-base64.server'
|
||||
import { sanitizeInputFormat, sanitizeTools } from '@/lib/workflows/comparison/normalize'
|
||||
import {
|
||||
BlockType,
|
||||
buildResumeApiUrl,
|
||||
@@ -34,6 +35,7 @@ import { validateBlockType } from '@/executor/utils/permission-check'
|
||||
import type { VariableResolver } from '@/executor/variables/resolver'
|
||||
import type { SerializedBlock } from '@/serializer/types'
|
||||
import type { SubflowType } from '@/stores/workflows/workflow/types'
|
||||
import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('BlockExecutor')
|
||||
|
||||
@@ -87,7 +89,7 @@ export class BlockExecutor {
|
||||
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
|
||||
|
||||
if (blockLog) {
|
||||
blockLog.input = this.parseJsonInputs(resolvedInputs)
|
||||
blockLog.input = this.sanitizeInputsForLog(resolvedInputs)
|
||||
}
|
||||
} catch (error) {
|
||||
cleanupSelfReference?.()
|
||||
@@ -162,7 +164,7 @@ export class BlockExecutor {
|
||||
ctx,
|
||||
node,
|
||||
block,
|
||||
this.parseJsonInputs(resolvedInputs),
|
||||
this.sanitizeInputsForLog(resolvedInputs),
|
||||
displayOutput,
|
||||
duration
|
||||
)
|
||||
@@ -241,7 +243,7 @@ export class BlockExecutor {
|
||||
blockLog.durationMs = duration
|
||||
blockLog.success = false
|
||||
blockLog.error = errorMessage
|
||||
blockLog.input = this.parseJsonInputs(input)
|
||||
blockLog.input = this.sanitizeInputsForLog(input)
|
||||
blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
|
||||
}
|
||||
|
||||
@@ -260,7 +262,7 @@ export class BlockExecutor {
|
||||
ctx,
|
||||
node,
|
||||
block,
|
||||
this.parseJsonInputs(input),
|
||||
this.sanitizeInputsForLog(input),
|
||||
displayOutput,
|
||||
duration
|
||||
)
|
||||
@@ -352,29 +354,41 @@ export class BlockExecutor {
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse JSON string inputs to objects for log display only.
|
||||
* Attempts to parse any string that looks like JSON.
|
||||
* Sanitizes inputs for log display.
|
||||
* - Filters out system fields (UI-only, readonly, internal flags)
|
||||
* - Removes UI state from inputFormat items (e.g., collapsed)
|
||||
* - Parses JSON strings to objects for readability
|
||||
* Returns a new object - does not mutate the original inputs.
|
||||
*/
|
||||
private parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
|
||||
let result = inputs
|
||||
let hasChanges = false
|
||||
private sanitizeInputsForLog(inputs: Record<string, any>): Record<string, any> {
|
||||
const result: Record<string, any> = {}
|
||||
|
||||
for (const [key, value] of Object.entries(inputs)) {
|
||||
// isJSONString is a quick heuristic (checks for { or [), not a validator.
|
||||
// Invalid JSON is safely caught below - this just avoids JSON.parse on every string.
|
||||
if (typeof value !== 'string' || !isJSONString(value)) {
|
||||
if (SYSTEM_SUBBLOCK_IDS.includes(key) || key === 'triggerMode') {
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
if (!hasChanges) {
|
||||
result = { ...inputs }
|
||||
hasChanges = true
|
||||
if (key === 'inputFormat' && Array.isArray(value)) {
|
||||
result[key] = sanitizeInputFormat(value)
|
||||
continue
|
||||
}
|
||||
|
||||
if (key === 'tools' && Array.isArray(value)) {
|
||||
result[key] = sanitizeTools(value)
|
||||
continue
|
||||
}
|
||||
|
||||
// isJSONString is a quick heuristic (checks for { or [), not a validator.
|
||||
// Invalid JSON is safely caught below - this just avoids JSON.parse on every string.
|
||||
if (typeof value === 'string' && isJSONString(value)) {
|
||||
try {
|
||||
result[key] = JSON.parse(value.trim())
|
||||
} catch {
|
||||
// Not valid JSON, keep original string
|
||||
result[key] = value
|
||||
}
|
||||
result[key] = JSON.parse(value.trim())
|
||||
} catch {
|
||||
// Not valid JSON, keep original string
|
||||
} else {
|
||||
result[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -77,15 +77,16 @@ export class EdgeManager {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if any deactivation targets that previously received an activated edge are now ready
|
||||
for (const { target } of edgesToDeactivate) {
|
||||
if (
|
||||
!readyNodes.includes(target) &&
|
||||
!activatedTargets.includes(target) &&
|
||||
this.nodesWithActivatedEdge.has(target) &&
|
||||
this.isTargetReady(target)
|
||||
) {
|
||||
readyNodes.push(target)
|
||||
if (output.selectedRoute !== EDGE.LOOP_EXIT && output.selectedRoute !== EDGE.PARALLEL_EXIT) {
|
||||
for (const { target } of edgesToDeactivate) {
|
||||
if (
|
||||
!readyNodes.includes(target) &&
|
||||
!activatedTargets.includes(target) &&
|
||||
this.nodesWithActivatedEdge.has(target) &&
|
||||
this.isTargetReady(target)
|
||||
) {
|
||||
readyNodes.push(target)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ export class ExecutionEngine {
|
||||
private allowResumeTriggers: boolean
|
||||
private cancelledFlag = false
|
||||
private errorFlag = false
|
||||
private stoppedEarlyFlag = false
|
||||
private executionError: Error | null = null
|
||||
private lastCancellationCheck = 0
|
||||
private readonly useRedisCancellation: boolean
|
||||
@@ -105,7 +106,7 @@ export class ExecutionEngine {
|
||||
this.initializeQueue(triggerBlockId)
|
||||
|
||||
while (this.hasWork()) {
|
||||
if ((await this.checkCancellation()) || this.errorFlag) {
|
||||
if ((await this.checkCancellation()) || this.errorFlag || this.stoppedEarlyFlag) {
|
||||
break
|
||||
}
|
||||
await this.processQueue()
|
||||
@@ -259,6 +260,16 @@ export class ExecutionEngine {
|
||||
}
|
||||
|
||||
private initializeQueue(triggerBlockId?: string): void {
|
||||
if (this.context.runFromBlockContext) {
|
||||
const { startBlockId } = this.context.runFromBlockContext
|
||||
logger.info('Initializing queue for run-from-block mode', {
|
||||
startBlockId,
|
||||
dirtySetSize: this.context.runFromBlockContext.dirtySet.size,
|
||||
})
|
||||
this.addToQueue(startBlockId)
|
||||
return
|
||||
}
|
||||
|
||||
const pendingBlocks = this.context.metadata.pendingBlocks
|
||||
const remainingEdges = (this.context.metadata as any).remainingEdges
|
||||
|
||||
@@ -385,11 +396,28 @@ export class ExecutionEngine {
|
||||
this.finalOutput = output
|
||||
}
|
||||
|
||||
if (this.context.stopAfterBlockId === nodeId) {
|
||||
// For loop/parallel sentinels, only stop if the subflow has fully exited (all iterations done)
|
||||
// shouldContinue: true means more iterations, shouldExit: true means loop is done
|
||||
const shouldContinueLoop = output.shouldContinue === true
|
||||
if (!shouldContinueLoop) {
|
||||
logger.info('Stopping execution after target block', { nodeId })
|
||||
this.stoppedEarlyFlag = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false)
|
||||
|
||||
logger.info('Processing outgoing edges', {
|
||||
nodeId,
|
||||
outgoingEdgesCount: node.outgoingEdges.size,
|
||||
outgoingEdges: Array.from(node.outgoingEdges.entries()).map(([id, e]) => ({
|
||||
id,
|
||||
target: e.target,
|
||||
sourceHandle: e.sourceHandle,
|
||||
})),
|
||||
output,
|
||||
readyNodesCount: readyNodes.length,
|
||||
readyNodes,
|
||||
})
|
||||
|
||||
@@ -5,17 +5,31 @@ import { BlockExecutor } from '@/executor/execution/block-executor'
|
||||
import { EdgeManager } from '@/executor/execution/edge-manager'
|
||||
import { ExecutionEngine } from '@/executor/execution/engine'
|
||||
import { ExecutionState } from '@/executor/execution/state'
|
||||
import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
|
||||
import type {
|
||||
ContextExtensions,
|
||||
SerializableExecutionState,
|
||||
WorkflowInput,
|
||||
} from '@/executor/execution/types'
|
||||
import { createBlockHandlers } from '@/executor/handlers/registry'
|
||||
import { LoopOrchestrator } from '@/executor/orchestrators/loop'
|
||||
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
|
||||
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
|
||||
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
|
||||
import {
|
||||
computeExecutionSets,
|
||||
type RunFromBlockContext,
|
||||
resolveContainerToSentinelStart,
|
||||
validateRunFromBlock,
|
||||
} from '@/executor/utils/run-from-block'
|
||||
import {
|
||||
buildResolutionFromBlock,
|
||||
buildStartBlockOutput,
|
||||
resolveExecutorStartBlock,
|
||||
} from '@/executor/utils/start-block'
|
||||
import {
|
||||
extractLoopIdFromSentinel,
|
||||
extractParallelIdFromSentinel,
|
||||
} from '@/executor/utils/subflow-utils'
|
||||
import { VariableResolver } from '@/executor/variables/resolver'
|
||||
import type { SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
@@ -48,7 +62,10 @@ export class DAGExecutor {
|
||||
|
||||
async execute(workflowId: string, triggerBlockId?: string): Promise<ExecutionResult> {
|
||||
const savedIncomingEdges = this.contextExtensions.dagIncomingEdges
|
||||
const dag = this.dagBuilder.build(this.workflow, triggerBlockId, savedIncomingEdges)
|
||||
const dag = this.dagBuilder.build(this.workflow, {
|
||||
triggerBlockId,
|
||||
savedIncomingEdges,
|
||||
})
|
||||
const { context, state } = this.createExecutionContext(workflowId, triggerBlockId)
|
||||
|
||||
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
|
||||
@@ -89,17 +106,156 @@ export class DAGExecutor {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute from a specific block using cached outputs for upstream blocks.
|
||||
*/
|
||||
async executeFromBlock(
|
||||
workflowId: string,
|
||||
startBlockId: string,
|
||||
sourceSnapshot: SerializableExecutionState
|
||||
): Promise<ExecutionResult> {
|
||||
// Build full DAG with all blocks to compute upstream set for snapshot filtering
|
||||
// includeAllBlocks is needed because the startBlockId might be a trigger not reachable from the main trigger
|
||||
const dag = this.dagBuilder.build(this.workflow, { includeAllBlocks: true })
|
||||
|
||||
const executedBlocks = new Set(sourceSnapshot.executedBlocks)
|
||||
const validation = validateRunFromBlock(startBlockId, dag, executedBlocks)
|
||||
if (!validation.valid) {
|
||||
throw new Error(validation.error)
|
||||
}
|
||||
|
||||
const { dirtySet, upstreamSet, reachableUpstreamSet } = computeExecutionSets(dag, startBlockId)
|
||||
const effectiveStartBlockId = resolveContainerToSentinelStart(startBlockId, dag) ?? startBlockId
|
||||
|
||||
// Extract container IDs from sentinel IDs in reachable upstream set
|
||||
// Use reachableUpstreamSet (not upstreamSet) to preserve sibling branch outputs
|
||||
// Example: A->C, B->C where C references A.result || B.result
|
||||
// When running from A, B's output should be preserved for C to reference
|
||||
const reachableContainerIds = new Set<string>()
|
||||
for (const nodeId of reachableUpstreamSet) {
|
||||
const loopId = extractLoopIdFromSentinel(nodeId)
|
||||
if (loopId) reachableContainerIds.add(loopId)
|
||||
const parallelId = extractParallelIdFromSentinel(nodeId)
|
||||
if (parallelId) reachableContainerIds.add(parallelId)
|
||||
}
|
||||
|
||||
// Filter snapshot to include all blocks reachable from dirty blocks
|
||||
// This preserves sibling branch outputs that dirty blocks may reference
|
||||
const filteredBlockStates: Record<string, any> = {}
|
||||
for (const [blockId, state] of Object.entries(sourceSnapshot.blockStates)) {
|
||||
if (reachableUpstreamSet.has(blockId) || reachableContainerIds.has(blockId)) {
|
||||
filteredBlockStates[blockId] = state
|
||||
}
|
||||
}
|
||||
const filteredExecutedBlocks = sourceSnapshot.executedBlocks.filter(
|
||||
(id) => reachableUpstreamSet.has(id) || reachableContainerIds.has(id)
|
||||
)
|
||||
|
||||
// Filter loop/parallel executions to only include reachable containers
|
||||
const filteredLoopExecutions: Record<string, any> = {}
|
||||
if (sourceSnapshot.loopExecutions) {
|
||||
for (const [loopId, execution] of Object.entries(sourceSnapshot.loopExecutions)) {
|
||||
if (reachableContainerIds.has(loopId)) {
|
||||
filteredLoopExecutions[loopId] = execution
|
||||
}
|
||||
}
|
||||
}
|
||||
const filteredParallelExecutions: Record<string, any> = {}
|
||||
if (sourceSnapshot.parallelExecutions) {
|
||||
for (const [parallelId, execution] of Object.entries(sourceSnapshot.parallelExecutions)) {
|
||||
if (reachableContainerIds.has(parallelId)) {
|
||||
filteredParallelExecutions[parallelId] = execution
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const filteredSnapshot: SerializableExecutionState = {
|
||||
...sourceSnapshot,
|
||||
blockStates: filteredBlockStates,
|
||||
executedBlocks: filteredExecutedBlocks,
|
||||
loopExecutions: filteredLoopExecutions,
|
||||
parallelExecutions: filteredParallelExecutions,
|
||||
}
|
||||
|
||||
logger.info('Executing from block', {
|
||||
workflowId,
|
||||
startBlockId,
|
||||
effectiveStartBlockId,
|
||||
dirtySetSize: dirtySet.size,
|
||||
upstreamSetSize: upstreamSet.size,
|
||||
reachableUpstreamSetSize: reachableUpstreamSet.size,
|
||||
})
|
||||
|
||||
// Remove incoming edges from non-dirty sources so convergent blocks don't wait for cached upstream
|
||||
for (const nodeId of dirtySet) {
|
||||
const node = dag.nodes.get(nodeId)
|
||||
if (!node) continue
|
||||
|
||||
const nonDirtyIncoming: string[] = []
|
||||
for (const sourceId of node.incomingEdges) {
|
||||
if (!dirtySet.has(sourceId)) {
|
||||
nonDirtyIncoming.push(sourceId)
|
||||
}
|
||||
}
|
||||
|
||||
for (const sourceId of nonDirtyIncoming) {
|
||||
node.incomingEdges.delete(sourceId)
|
||||
}
|
||||
}
|
||||
|
||||
const runFromBlockContext = { startBlockId: effectiveStartBlockId, dirtySet }
|
||||
const { context, state } = this.createExecutionContext(workflowId, undefined, {
|
||||
snapshotState: filteredSnapshot,
|
||||
runFromBlockContext,
|
||||
})
|
||||
|
||||
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
|
||||
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
|
||||
loopOrchestrator.setContextExtensions(this.contextExtensions)
|
||||
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
|
||||
parallelOrchestrator.setResolver(resolver)
|
||||
parallelOrchestrator.setContextExtensions(this.contextExtensions)
|
||||
const allHandlers = createBlockHandlers()
|
||||
const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
|
||||
const edgeManager = new EdgeManager(dag)
|
||||
loopOrchestrator.setEdgeManager(edgeManager)
|
||||
const nodeOrchestrator = new NodeExecutionOrchestrator(
|
||||
dag,
|
||||
state,
|
||||
blockExecutor,
|
||||
loopOrchestrator,
|
||||
parallelOrchestrator
|
||||
)
|
||||
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
|
||||
|
||||
return await engine.run()
|
||||
}
|
||||
|
||||
private createExecutionContext(
|
||||
workflowId: string,
|
||||
triggerBlockId?: string
|
||||
triggerBlockId?: string,
|
||||
overrides?: {
|
||||
snapshotState?: SerializableExecutionState
|
||||
runFromBlockContext?: RunFromBlockContext
|
||||
}
|
||||
): { context: ExecutionContext; state: ExecutionState } {
|
||||
const snapshotState = this.contextExtensions.snapshotState
|
||||
const snapshotState = overrides?.snapshotState ?? this.contextExtensions.snapshotState
|
||||
const blockStates = snapshotState?.blockStates
|
||||
? new Map(Object.entries(snapshotState.blockStates))
|
||||
: new Map<string, BlockState>()
|
||||
const executedBlocks = snapshotState?.executedBlocks
|
||||
let executedBlocks = snapshotState?.executedBlocks
|
||||
? new Set(snapshotState.executedBlocks)
|
||||
: new Set<string>()
|
||||
|
||||
if (overrides?.runFromBlockContext) {
|
||||
const { dirtySet } = overrides.runFromBlockContext
|
||||
executedBlocks = new Set([...executedBlocks].filter((id) => !dirtySet.has(id)))
|
||||
logger.info('Cleared executed status for dirty blocks', {
|
||||
dirtySetSize: dirtySet.size,
|
||||
remainingExecutedBlocks: executedBlocks.size,
|
||||
})
|
||||
}
|
||||
|
||||
const state = new ExecutionState(blockStates, executedBlocks)
|
||||
|
||||
const context: ExecutionContext = {
|
||||
@@ -109,7 +265,7 @@ export class DAGExecutor {
|
||||
userId: this.contextExtensions.userId,
|
||||
isDeployedContext: this.contextExtensions.isDeployedContext,
|
||||
blockStates: state.getBlockStates(),
|
||||
blockLogs: snapshotState?.blockLogs ?? [],
|
||||
blockLogs: overrides?.runFromBlockContext ? [] : (snapshotState?.blockLogs ?? []),
|
||||
metadata: {
|
||||
...this.contextExtensions.metadata,
|
||||
startTime: new Date().toISOString(),
|
||||
@@ -169,6 +325,8 @@ export class DAGExecutor {
|
||||
abortSignal: this.contextExtensions.abortSignal,
|
||||
includeFileBase64: this.contextExtensions.includeFileBase64,
|
||||
base64MaxBytes: this.contextExtensions.base64MaxBytes,
|
||||
runFromBlockContext: overrides?.runFromBlockContext,
|
||||
stopAfterBlockId: this.contextExtensions.stopAfterBlockId,
|
||||
}
|
||||
|
||||
if (this.contextExtensions.resumeFromSnapshot) {
|
||||
@@ -193,6 +351,15 @@ export class DAGExecutor {
|
||||
pendingBlocks: context.metadata.pendingBlocks,
|
||||
skipStarterBlockInit: true,
|
||||
})
|
||||
} else if (overrides?.runFromBlockContext) {
|
||||
// In run-from-block mode, initialize the start block only if it's a regular block
|
||||
// Skip for sentinels/containers (loop/parallel) which aren't real blocks
|
||||
const startBlockId = overrides.runFromBlockContext.startBlockId
|
||||
const isRegularBlock = this.workflow.blocks.some((b) => b.id === startBlockId)
|
||||
|
||||
if (isRegularBlock) {
|
||||
this.initializeStarterBlock(context, state, startBlockId)
|
||||
}
|
||||
} else {
|
||||
this.initializeStarterBlock(context, state, triggerBlockId)
|
||||
}
|
||||
|
||||
@@ -27,6 +27,8 @@ export interface ParallelScope {
|
||||
items?: any[]
|
||||
/** Error message if parallel validation failed (e.g., exceeded max branches) */
|
||||
validationError?: string
|
||||
/** Whether the parallel has an empty distribution and should be skipped */
|
||||
isEmpty?: boolean
|
||||
}
|
||||
|
||||
export class ExecutionState implements BlockStateController {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { Edge } from 'reactflow'
|
||||
import type { BlockLog, BlockState, NormalizedBlockOutput } from '@/executor/types'
|
||||
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
|
||||
import type { SubflowType } from '@/stores/workflows/workflow/types'
|
||||
|
||||
export interface ExecutionMetadata {
|
||||
@@ -105,6 +106,17 @@ export interface ContextExtensions {
|
||||
output: { input?: any; output: NormalizedBlockOutput; executionTime: number },
|
||||
iterationContext?: IterationContext
|
||||
) => Promise<void>
|
||||
|
||||
/**
|
||||
* Run-from-block configuration. When provided, executor runs in partial
|
||||
* execution mode starting from the specified block.
|
||||
*/
|
||||
runFromBlockContext?: RunFromBlockContext
|
||||
|
||||
/**
|
||||
* Stop execution after this block completes. Used for "run until block" feature.
|
||||
*/
|
||||
stopAfterBlockId?: string
|
||||
}
|
||||
|
||||
export interface WorkflowInput {
|
||||
|
||||
@@ -276,7 +276,16 @@ export class LoopOrchestrator {
|
||||
scope: LoopScope
|
||||
): LoopContinuationResult {
|
||||
const results = scope.allIterationOutputs
|
||||
this.state.setBlockOutput(loopId, { results }, DEFAULTS.EXECUTION_TIME)
|
||||
const output = { results }
|
||||
this.state.setBlockOutput(loopId, output, DEFAULTS.EXECUTION_TIME)
|
||||
|
||||
// Emit onBlockComplete for the loop container so the UI can track it
|
||||
if (this.contextExtensions?.onBlockComplete) {
|
||||
this.contextExtensions.onBlockComplete(loopId, 'Loop', 'loop', {
|
||||
output,
|
||||
executionTime: DEFAULTS.EXECUTION_TIME,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
shouldContinue: false,
|
||||
@@ -386,10 +395,10 @@ export class LoopOrchestrator {
|
||||
return true
|
||||
}
|
||||
|
||||
// forEach: skip if items array is empty
|
||||
if (scope.loopType === 'forEach') {
|
||||
if (!scope.items || scope.items.length === 0) {
|
||||
logger.info('ForEach loop has empty items, skipping loop body', { loopId })
|
||||
logger.info('ForEach loop has empty collection, skipping loop body', { loopId })
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
@@ -399,6 +408,8 @@ export class LoopOrchestrator {
|
||||
if (scope.loopType === 'for') {
|
||||
if (scope.maxIterations === 0) {
|
||||
logger.info('For loop has 0 iterations, skipping loop body', { loopId })
|
||||
// Set empty output for the loop
|
||||
this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
|
||||
@@ -31,7 +31,18 @@ export class NodeExecutionOrchestrator {
|
||||
throw new Error(`Node not found in DAG: ${nodeId}`)
|
||||
}
|
||||
|
||||
if (this.state.hasExecuted(nodeId)) {
|
||||
if (ctx.runFromBlockContext && !ctx.runFromBlockContext.dirtySet.has(nodeId)) {
|
||||
const cachedOutput = this.state.getBlockOutput(nodeId) || {}
|
||||
logger.debug('Skipping non-dirty block in run-from-block mode', { nodeId })
|
||||
return {
|
||||
nodeId,
|
||||
output: cachedOutput,
|
||||
isFinalOutput: false,
|
||||
}
|
||||
}
|
||||
|
||||
const isDirtyBlock = ctx.runFromBlockContext?.dirtySet.has(nodeId) ?? false
|
||||
if (!isDirtyBlock && this.state.hasExecuted(nodeId)) {
|
||||
const output = this.state.getBlockOutput(nodeId) || {}
|
||||
return {
|
||||
nodeId,
|
||||
@@ -97,7 +108,7 @@ export class NodeExecutionOrchestrator {
|
||||
if (loopId) {
|
||||
const shouldExecute = await this.loopOrchestrator.evaluateInitialCondition(ctx, loopId)
|
||||
if (!shouldExecute) {
|
||||
logger.info('While loop initial condition false, skipping loop body', { loopId })
|
||||
logger.info('Loop initial condition false, skipping loop body', { loopId })
|
||||
return {
|
||||
sentinelStart: true,
|
||||
shouldExit: true,
|
||||
@@ -158,6 +169,17 @@ export class NodeExecutionOrchestrator {
|
||||
this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
|
||||
}
|
||||
}
|
||||
|
||||
const scope = this.parallelOrchestrator.getParallelScope(ctx, parallelId)
|
||||
if (scope?.isEmpty) {
|
||||
logger.info('Parallel has empty distribution, skipping parallel body', { parallelId })
|
||||
return {
|
||||
sentinelStart: true,
|
||||
shouldExit: true,
|
||||
selectedRoute: EDGE.PARALLEL_EXIT,
|
||||
}
|
||||
}
|
||||
|
||||
return { sentinelStart: true }
|
||||
}
|
||||
|
||||
|
||||
@@ -61,11 +61,13 @@ export class ParallelOrchestrator {
|
||||
|
||||
let items: any[] | undefined
|
||||
let branchCount: number
|
||||
let isEmpty = false
|
||||
|
||||
try {
|
||||
const resolved = this.resolveBranchCount(ctx, parallelConfig)
|
||||
const resolved = this.resolveBranchCount(ctx, parallelConfig, parallelId)
|
||||
branchCount = resolved.branchCount
|
||||
items = resolved.items
|
||||
isEmpty = resolved.isEmpty ?? false
|
||||
} catch (error) {
|
||||
const errorMessage = `Parallel Items did not resolve: ${error instanceof Error ? error.message : String(error)}`
|
||||
logger.error(errorMessage, { parallelId, distribution: parallelConfig.distribution })
|
||||
@@ -91,6 +93,34 @@ export class ParallelOrchestrator {
|
||||
throw new Error(branchError)
|
||||
}
|
||||
|
||||
// Handle empty distribution - skip parallel body
|
||||
if (isEmpty || branchCount === 0) {
|
||||
const scope: ParallelScope = {
|
||||
parallelId,
|
||||
totalBranches: 0,
|
||||
branchOutputs: new Map(),
|
||||
completedCount: 0,
|
||||
totalExpectedNodes: 0,
|
||||
items: [],
|
||||
isEmpty: true,
|
||||
}
|
||||
|
||||
if (!ctx.parallelExecutions) {
|
||||
ctx.parallelExecutions = new Map()
|
||||
}
|
||||
ctx.parallelExecutions.set(parallelId, scope)
|
||||
|
||||
// Set empty output for the parallel
|
||||
this.state.setBlockOutput(parallelId, { results: [] })
|
||||
|
||||
logger.info('Parallel scope initialized with empty distribution, skipping body', {
|
||||
parallelId,
|
||||
branchCount: 0,
|
||||
})
|
||||
|
||||
return scope
|
||||
}
|
||||
|
||||
const { entryNodes } = this.expander.expandParallel(this.dag, parallelId, branchCount, items)
|
||||
|
||||
const scope: ParallelScope = {
|
||||
@@ -127,15 +157,17 @@ export class ParallelOrchestrator {
|
||||
|
||||
private resolveBranchCount(
|
||||
ctx: ExecutionContext,
|
||||
config: SerializedParallel
|
||||
): { branchCount: number; items?: any[] } {
|
||||
config: SerializedParallel,
|
||||
parallelId: string
|
||||
): { branchCount: number; items?: any[]; isEmpty?: boolean } {
|
||||
if (config.parallelType === 'count') {
|
||||
return { branchCount: config.count ?? 1 }
|
||||
}
|
||||
|
||||
const items = this.resolveDistributionItems(ctx, config)
|
||||
if (items.length === 0) {
|
||||
return { branchCount: config.count ?? 1 }
|
||||
logger.info('Parallel has empty distribution, skipping parallel body', { parallelId })
|
||||
return { branchCount: 0, items: [], isEmpty: true }
|
||||
}
|
||||
|
||||
return { branchCount: items.length, items }
|
||||
@@ -228,9 +260,17 @@ export class ParallelOrchestrator {
|
||||
const branchOutputs = scope.branchOutputs.get(i) || []
|
||||
results.push(branchOutputs)
|
||||
}
|
||||
this.state.setBlockOutput(parallelId, {
|
||||
results,
|
||||
})
|
||||
const output = { results }
|
||||
this.state.setBlockOutput(parallelId, output)
|
||||
|
||||
// Emit onBlockComplete for the parallel container so the UI can track it
|
||||
if (this.contextExtensions?.onBlockComplete) {
|
||||
this.contextExtensions.onBlockComplete(parallelId, 'Parallel', 'parallel', {
|
||||
output,
|
||||
executionTime: 0,
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
allBranchesComplete: true,
|
||||
results,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import type { TraceSpan } from '@/lib/logs/types'
|
||||
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
|
||||
import type { BlockOutput } from '@/blocks/types'
|
||||
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
export interface UserFile {
|
||||
@@ -250,6 +251,17 @@ export interface ExecutionContext {
|
||||
* will not have their base64 content fetched.
|
||||
*/
|
||||
base64MaxBytes?: number
|
||||
|
||||
/**
|
||||
* Context for "run from block" mode. When present, only blocks in dirtySet
|
||||
* will be executed; others return cached outputs from the source snapshot.
|
||||
*/
|
||||
runFromBlockContext?: RunFromBlockContext
|
||||
|
||||
/**
|
||||
* Stop execution after this block completes. Used for "run until block" feature.
|
||||
*/
|
||||
stopAfterBlockId?: string
|
||||
}
|
||||
|
||||
export interface ExecutionResult {
|
||||
|
||||
1537
apps/sim/executor/utils/run-from-block.test.ts
Normal file
1537
apps/sim/executor/utils/run-from-block.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
219
apps/sim/executor/utils/run-from-block.ts
Normal file
219
apps/sim/executor/utils/run-from-block.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { LOOP, PARALLEL } from '@/executor/constants'
|
||||
import type { DAG } from '@/executor/dag/builder'
|
||||
|
||||
/**
|
||||
* Builds the sentinel-start node ID for a loop.
|
||||
*/
|
||||
function buildLoopSentinelStartId(loopId: string): string {
|
||||
return `${LOOP.SENTINEL.PREFIX}${loopId}${LOOP.SENTINEL.START_SUFFIX}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds the sentinel-start node ID for a parallel.
|
||||
*/
|
||||
function buildParallelSentinelStartId(parallelId: string): string {
|
||||
return `${PARALLEL.SENTINEL.PREFIX}${parallelId}${PARALLEL.SENTINEL.START_SUFFIX}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a block ID is a loop or parallel container and returns the sentinel-start ID if so.
|
||||
* Returns null if the block is not a container.
|
||||
*/
|
||||
export function resolveContainerToSentinelStart(blockId: string, dag: DAG): string | null {
|
||||
if (dag.loopConfigs.has(blockId)) {
|
||||
return buildLoopSentinelStartId(blockId)
|
||||
}
|
||||
if (dag.parallelConfigs.has(blockId)) {
|
||||
return buildParallelSentinelStartId(blockId)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of validating a block for run-from-block execution.
|
||||
*/
|
||||
export interface RunFromBlockValidation {
|
||||
valid: boolean
|
||||
error?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Context for run-from-block execution mode.
|
||||
*/
|
||||
export interface RunFromBlockContext {
|
||||
/** The block ID to start execution from */
|
||||
startBlockId: string
|
||||
/** Set of block IDs that need re-execution (start block + all downstream) */
|
||||
dirtySet: Set<string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of computing execution sets for run-from-block mode.
|
||||
*/
|
||||
export interface ExecutionSets {
|
||||
/** Blocks that need re-execution (start block + all downstream) */
|
||||
dirtySet: Set<string>
|
||||
/** Blocks that are upstream (ancestors) of the start block */
|
||||
upstreamSet: Set<string>
|
||||
/** Blocks that are upstream of any dirty block (for snapshot preservation) */
|
||||
reachableUpstreamSet: Set<string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the dirty set, upstream set, and reachable upstream set.
|
||||
* - Dirty set: start block + all blocks reachable via outgoing edges (need re-execution)
|
||||
* - Upstream set: all blocks reachable via incoming edges from the start block
|
||||
* - Reachable upstream set: all non-dirty blocks that are upstream of ANY dirty block
|
||||
* (includes sibling branches that dirty blocks may reference)
|
||||
*
|
||||
* For loop/parallel containers, starts from the sentinel-start node and includes
|
||||
* the container ID itself in the dirty set.
|
||||
*
|
||||
* @param dag - The workflow DAG
|
||||
* @param startBlockId - The block to start execution from
|
||||
* @returns Object containing dirtySet, upstreamSet, and reachableUpstreamSet
|
||||
*/
|
||||
export function computeExecutionSets(dag: DAG, startBlockId: string): ExecutionSets {
|
||||
const dirty = new Set<string>([startBlockId])
|
||||
const upstream = new Set<string>()
|
||||
const sentinelStartId = resolveContainerToSentinelStart(startBlockId, dag)
|
||||
const traversalStartId = sentinelStartId ?? startBlockId
|
||||
|
||||
if (sentinelStartId) {
|
||||
dirty.add(sentinelStartId)
|
||||
}
|
||||
|
||||
// BFS downstream for dirty set
|
||||
const downstreamQueue = [traversalStartId]
|
||||
while (downstreamQueue.length > 0) {
|
||||
const nodeId = downstreamQueue.shift()!
|
||||
const node = dag.nodes.get(nodeId)
|
||||
if (!node) continue
|
||||
|
||||
for (const [, edge] of node.outgoingEdges) {
|
||||
if (!dirty.has(edge.target)) {
|
||||
dirty.add(edge.target)
|
||||
downstreamQueue.push(edge.target)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BFS upstream from start block for upstream set
|
||||
const upstreamQueue = [traversalStartId]
|
||||
while (upstreamQueue.length > 0) {
|
||||
const nodeId = upstreamQueue.shift()!
|
||||
const node = dag.nodes.get(nodeId)
|
||||
if (!node) continue
|
||||
|
||||
for (const sourceId of node.incomingEdges) {
|
||||
if (!upstream.has(sourceId)) {
|
||||
upstream.add(sourceId)
|
||||
upstreamQueue.push(sourceId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compute reachable upstream: all non-dirty blocks upstream of ANY dirty block
|
||||
// This handles the case where a dirty block (like C in A->C, B->C) may reference
|
||||
// sibling branches (like B when running from A)
|
||||
const reachableUpstream = new Set<string>()
|
||||
for (const dirtyNodeId of dirty) {
|
||||
const node = dag.nodes.get(dirtyNodeId)
|
||||
if (!node) continue
|
||||
|
||||
// BFS upstream from this dirty node
|
||||
const queue = [...node.incomingEdges]
|
||||
while (queue.length > 0) {
|
||||
const sourceId = queue.shift()!
|
||||
if (reachableUpstream.has(sourceId) || dirty.has(sourceId)) continue
|
||||
|
||||
reachableUpstream.add(sourceId)
|
||||
const sourceNode = dag.nodes.get(sourceId)
|
||||
if (sourceNode) {
|
||||
queue.push(...sourceNode.incomingEdges)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { dirtySet: dirty, upstreamSet: upstream, reachableUpstreamSet: reachableUpstream }
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates that a block can be used as a run-from-block starting point.
|
||||
*
|
||||
* Validation rules:
|
||||
* - Block must exist in the DAG (or be a loop/parallel container)
|
||||
* - Block cannot be inside a loop (but loop containers are allowed)
|
||||
* - Block cannot be inside a parallel (but parallel containers are allowed)
|
||||
* - Block cannot be a sentinel node
|
||||
* - All upstream dependencies must have been executed (have cached outputs)
|
||||
*
|
||||
* @param blockId - The block ID to validate
|
||||
* @param dag - The workflow DAG
|
||||
* @param executedBlocks - Set of blocks that were executed in the source run
|
||||
* @returns Validation result with error message if invalid
|
||||
*/
|
||||
export function validateRunFromBlock(
|
||||
blockId: string,
|
||||
dag: DAG,
|
||||
executedBlocks: Set<string>
|
||||
): RunFromBlockValidation {
|
||||
const node = dag.nodes.get(blockId)
|
||||
const isLoopContainer = dag.loopConfigs.has(blockId)
|
||||
const isParallelContainer = dag.parallelConfigs.has(blockId)
|
||||
const isContainer = isLoopContainer || isParallelContainer
|
||||
|
||||
if (!node && !isContainer) {
|
||||
return { valid: false, error: `Block not found in workflow: ${blockId}` }
|
||||
}
|
||||
|
||||
if (isContainer) {
|
||||
const sentinelStartId = resolveContainerToSentinelStart(blockId, dag)
|
||||
if (!sentinelStartId || !dag.nodes.has(sentinelStartId)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Container sentinel not found for: ${blockId}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (node) {
|
||||
if (node.metadata.isLoopNode) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot run from block inside loop: ${node.metadata.loopId}`,
|
||||
}
|
||||
}
|
||||
|
||||
if (node.metadata.isParallelBranch) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Cannot run from block inside parallel: ${node.metadata.parallelId}`,
|
||||
}
|
||||
}
|
||||
|
||||
if (node.metadata.isSentinel) {
|
||||
return { valid: false, error: 'Cannot run from sentinel node' }
|
||||
}
|
||||
|
||||
// Check immediate upstream dependencies were executed
|
||||
for (const sourceId of node.incomingEdges) {
|
||||
const sourceNode = dag.nodes.get(sourceId)
|
||||
// Skip sentinel nodes - they're internal and not in executedBlocks
|
||||
if (sourceNode?.metadata.isSentinel) continue
|
||||
|
||||
// Skip trigger nodes - they're entry points and don't need prior execution
|
||||
// A trigger node has no incoming edges
|
||||
if (sourceNode && sourceNode.incomingEdges.size === 0) continue
|
||||
|
||||
if (!executedBlocks.has(sourceId)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: `Upstream dependency not executed: ${sourceId}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { valid: true }
|
||||
}
|
||||
@@ -1,10 +1,85 @@
|
||||
import { useCallback, useRef } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type { SubflowType } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('useExecutionStream')
|
||||
|
||||
/**
|
||||
* Processes SSE events from a response body and invokes appropriate callbacks.
|
||||
*/
|
||||
async function processSSEStream(
|
||||
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||
callbacks: ExecutionStreamCallbacks,
|
||||
logPrefix: string
|
||||
): Promise<void> {
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
const lines = buffer.split('\n\n')
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line.trim() || !line.startsWith('data: ')) continue
|
||||
|
||||
const data = line.substring(6).trim()
|
||||
if (data === '[DONE]') {
|
||||
logger.info(`${logPrefix} stream completed`)
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
const event = JSON.parse(data) as ExecutionEvent
|
||||
|
||||
switch (event.type) {
|
||||
case 'execution:started':
|
||||
callbacks.onExecutionStarted?.(event.data)
|
||||
break
|
||||
case 'execution:completed':
|
||||
callbacks.onExecutionCompleted?.(event.data)
|
||||
break
|
||||
case 'execution:error':
|
||||
callbacks.onExecutionError?.(event.data)
|
||||
break
|
||||
case 'execution:cancelled':
|
||||
callbacks.onExecutionCancelled?.(event.data)
|
||||
break
|
||||
case 'block:started':
|
||||
callbacks.onBlockStarted?.(event.data)
|
||||
break
|
||||
case 'block:completed':
|
||||
callbacks.onBlockCompleted?.(event.data)
|
||||
break
|
||||
case 'block:error':
|
||||
callbacks.onBlockError?.(event.data)
|
||||
break
|
||||
case 'stream:chunk':
|
||||
callbacks.onStreamChunk?.(event.data)
|
||||
break
|
||||
case 'stream:done':
|
||||
callbacks.onStreamDone?.(event.data)
|
||||
break
|
||||
default:
|
||||
logger.warn('Unknown event type:', (event as any).type)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse SSE event:', error, { data })
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
}
|
||||
|
||||
export interface ExecutionStreamCallbacks {
|
||||
onExecutionStarted?: (data: { startTime: string }) => void
|
||||
onExecutionCompleted?: (data: {
|
||||
@@ -68,6 +143,15 @@ export interface ExecuteStreamOptions {
|
||||
loops?: Record<string, any>
|
||||
parallels?: Record<string, any>
|
||||
}
|
||||
stopAfterBlockId?: string
|
||||
callbacks?: ExecutionStreamCallbacks
|
||||
}
|
||||
|
||||
export interface ExecuteFromBlockOptions {
|
||||
workflowId: string
|
||||
startBlockId: string
|
||||
sourceSnapshot: SerializableExecutionState
|
||||
input?: any
|
||||
callbacks?: ExecutionStreamCallbacks
|
||||
}
|
||||
|
||||
@@ -119,91 +203,7 @@ export function useExecutionStream() {
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
|
||||
if (done) {
|
||||
break
|
||||
}
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
|
||||
const lines = buffer.split('\n\n')
|
||||
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line.trim() || !line.startsWith('data: ')) {
|
||||
continue
|
||||
}
|
||||
|
||||
const data = line.substring(6).trim()
|
||||
|
||||
if (data === '[DONE]') {
|
||||
logger.info('Stream completed')
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
const event = JSON.parse(data) as ExecutionEvent
|
||||
|
||||
logger.info('📡 SSE Event received:', {
|
||||
type: event.type,
|
||||
executionId: event.executionId,
|
||||
data: event.data,
|
||||
})
|
||||
|
||||
switch (event.type) {
|
||||
case 'execution:started':
|
||||
logger.info('🚀 Execution started')
|
||||
callbacks.onExecutionStarted?.(event.data)
|
||||
break
|
||||
case 'execution:completed':
|
||||
logger.info('✅ Execution completed')
|
||||
callbacks.onExecutionCompleted?.(event.data)
|
||||
break
|
||||
case 'execution:error':
|
||||
logger.error('❌ Execution error')
|
||||
callbacks.onExecutionError?.(event.data)
|
||||
break
|
||||
case 'execution:cancelled':
|
||||
logger.warn('🛑 Execution cancelled')
|
||||
callbacks.onExecutionCancelled?.(event.data)
|
||||
break
|
||||
case 'block:started':
|
||||
logger.info('🔷 Block started:', event.data.blockId)
|
||||
callbacks.onBlockStarted?.(event.data)
|
||||
break
|
||||
case 'block:completed':
|
||||
logger.info('✓ Block completed:', event.data.blockId)
|
||||
callbacks.onBlockCompleted?.(event.data)
|
||||
break
|
||||
case 'block:error':
|
||||
logger.error('✗ Block error:', event.data.blockId)
|
||||
callbacks.onBlockError?.(event.data)
|
||||
break
|
||||
case 'stream:chunk':
|
||||
callbacks.onStreamChunk?.(event.data)
|
||||
break
|
||||
case 'stream:done':
|
||||
logger.info('Stream done:', event.data.blockId)
|
||||
callbacks.onStreamDone?.(event.data)
|
||||
break
|
||||
default:
|
||||
logger.warn('Unknown event type:', (event as any).type)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to parse SSE event:', error, { data })
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
reader.releaseLock()
|
||||
}
|
||||
await processSSEStream(reader, callbacks, 'Execution')
|
||||
} catch (error: any) {
|
||||
if (error.name === 'AbortError') {
|
||||
logger.info('Execution stream cancelled')
|
||||
@@ -222,6 +222,70 @@ export function useExecutionStream() {
|
||||
}
|
||||
}, [])
|
||||
|
||||
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
|
||||
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
|
||||
|
||||
if (abortControllerRef.current) {
|
||||
abortControllerRef.current.abort()
|
||||
}
|
||||
|
||||
const abortController = new AbortController()
|
||||
abortControllerRef.current = abortController
|
||||
currentExecutionRef.current = null
|
||||
|
||||
try {
|
||||
const response = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ startBlockId, sourceSnapshot, input }),
|
||||
signal: abortController.signal,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
let errorResponse: any
|
||||
try {
|
||||
errorResponse = await response.json()
|
||||
} catch {
|
||||
throw new Error(`Server error (${response.status}): ${response.statusText}`)
|
||||
}
|
||||
const error = new Error(errorResponse.error || 'Failed to start execution')
|
||||
if (errorResponse && typeof errorResponse === 'object') {
|
||||
Object.assign(error, { executionResult: errorResponse })
|
||||
}
|
||||
throw error
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const executionId = response.headers.get('X-Execution-Id')
|
||||
if (executionId) {
|
||||
currentExecutionRef.current = { workflowId, executionId }
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
await processSSEStream(reader, callbacks, 'Run-from-block')
|
||||
} catch (error: any) {
|
||||
if (error.name === 'AbortError') {
|
||||
logger.info('Run-from-block execution cancelled')
|
||||
callbacks.onExecutionCancelled?.({ duration: 0 })
|
||||
} else {
|
||||
logger.error('Run-from-block execution error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
}
|
||||
throw error
|
||||
} finally {
|
||||
abortControllerRef.current = null
|
||||
currentExecutionRef.current = null
|
||||
}
|
||||
}, [])
|
||||
|
||||
const cancel = useCallback(() => {
|
||||
const execution = currentExecutionRef.current
|
||||
if (execution) {
|
||||
@@ -239,6 +303,7 @@ export function useExecutionStream() {
|
||||
|
||||
return {
|
||||
execute,
|
||||
executeFromBlock,
|
||||
cancel,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,7 +86,13 @@ describe('SnapshotService', () => {
|
||||
type: 'agent',
|
||||
position: { x: 100, y: 200 },
|
||||
|
||||
subBlocks: {},
|
||||
subBlocks: {
|
||||
prompt: {
|
||||
id: 'prompt',
|
||||
type: 'short-input',
|
||||
value: 'Hello world',
|
||||
},
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
@@ -104,8 +110,14 @@ describe('SnapshotService', () => {
|
||||
blocks: {
|
||||
block1: {
|
||||
...baseState.blocks.block1,
|
||||
// Different block state - we can change outputs to make it different
|
||||
outputs: { response: { type: 'string', description: 'different result' } },
|
||||
// Different subBlock value - this is a meaningful change
|
||||
subBlocks: {
|
||||
prompt: {
|
||||
id: 'prompt',
|
||||
type: 'short-input',
|
||||
value: 'Different prompt',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -11,12 +11,7 @@ import type {
|
||||
WorkflowExecutionSnapshotInsert,
|
||||
WorkflowState,
|
||||
} from '@/lib/logs/types'
|
||||
import {
|
||||
normalizedStringify,
|
||||
normalizeEdge,
|
||||
normalizeValue,
|
||||
sortEdges,
|
||||
} from '@/lib/workflows/comparison'
|
||||
import { normalizedStringify, normalizeWorkflowState } from '@/lib/workflows/comparison'
|
||||
|
||||
const logger = createLogger('SnapshotService')
|
||||
|
||||
@@ -38,7 +33,9 @@ export class SnapshotService implements ISnapshotService {
|
||||
|
||||
const existingSnapshot = await this.getSnapshotByHash(workflowId, stateHash)
|
||||
if (existingSnapshot) {
|
||||
logger.debug(`Reusing existing snapshot for workflow ${workflowId} with hash ${stateHash}`)
|
||||
logger.info(
|
||||
`Reusing existing snapshot for workflow ${workflowId} (hash: ${stateHash.slice(0, 12)}...)`
|
||||
)
|
||||
return {
|
||||
snapshot: existingSnapshot,
|
||||
isNew: false,
|
||||
@@ -59,8 +56,9 @@ export class SnapshotService implements ISnapshotService {
|
||||
.values(snapshotData)
|
||||
.returning()
|
||||
|
||||
logger.debug(`Created new snapshot for workflow ${workflowId} with hash ${stateHash}`)
|
||||
logger.debug(`Stored full state with ${Object.keys(state.blocks || {}).length} blocks`)
|
||||
logger.info(
|
||||
`Created new snapshot for workflow ${workflowId} (hash: ${stateHash.slice(0, 12)}..., blocks: ${Object.keys(state.blocks || {}).length})`
|
||||
)
|
||||
return {
|
||||
snapshot: {
|
||||
...newSnapshot,
|
||||
@@ -112,7 +110,7 @@ export class SnapshotService implements ISnapshotService {
|
||||
}
|
||||
|
||||
computeStateHash(state: WorkflowState): string {
|
||||
const normalizedState = this.normalizeStateForHashing(state)
|
||||
const normalizedState = normalizeWorkflowState(state)
|
||||
const stateString = normalizedStringify(normalizedState)
|
||||
return createHash('sha256').update(stateString).digest('hex')
|
||||
}
|
||||
@@ -130,69 +128,6 @@ export class SnapshotService implements ISnapshotService {
|
||||
logger.info(`Cleaned up ${deletedCount} orphaned snapshots older than ${olderThanDays} days`)
|
||||
return deletedCount
|
||||
}
|
||||
|
||||
private normalizeStateForHashing(state: WorkflowState): any {
|
||||
// 1. Normalize and sort edges
|
||||
const normalizedEdges = sortEdges((state.edges || []).map(normalizeEdge))
|
||||
|
||||
// 2. Normalize blocks
|
||||
const normalizedBlocks: Record<string, any> = {}
|
||||
|
||||
for (const [blockId, block] of Object.entries(state.blocks || {})) {
|
||||
const { position, layout, height, ...blockWithoutLayoutFields } = block
|
||||
|
||||
// Also exclude width/height from data object (container dimensions from autolayout)
|
||||
const {
|
||||
width: _dataWidth,
|
||||
height: _dataHeight,
|
||||
...dataRest
|
||||
} = blockWithoutLayoutFields.data || {}
|
||||
|
||||
// Normalize subBlocks
|
||||
const subBlocks = blockWithoutLayoutFields.subBlocks || {}
|
||||
const normalizedSubBlocks: Record<string, any> = {}
|
||||
|
||||
for (const [subBlockId, subBlock] of Object.entries(subBlocks)) {
|
||||
const value = subBlock.value ?? null
|
||||
|
||||
normalizedSubBlocks[subBlockId] = {
|
||||
type: subBlock.type,
|
||||
value: normalizeValue(value),
|
||||
...Object.fromEntries(
|
||||
Object.entries(subBlock).filter(([key]) => key !== 'value' && key !== 'type')
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
normalizedBlocks[blockId] = {
|
||||
...blockWithoutLayoutFields,
|
||||
data: dataRest,
|
||||
subBlocks: normalizedSubBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Normalize loops and parallels
|
||||
const normalizedLoops: Record<string, any> = {}
|
||||
for (const [loopId, loop] of Object.entries(state.loops || {})) {
|
||||
normalizedLoops[loopId] = normalizeValue(loop)
|
||||
}
|
||||
|
||||
const normalizedParallels: Record<string, any> = {}
|
||||
for (const [parallelId, parallel] of Object.entries(state.parallels || {})) {
|
||||
normalizedParallels[parallelId] = normalizeValue(parallel)
|
||||
}
|
||||
|
||||
// 4. Normalize variables (if present)
|
||||
const normalizedVariables = state.variables ? normalizeValue(state.variables) : undefined
|
||||
|
||||
return {
|
||||
blocks: normalizedBlocks,
|
||||
edges: normalizedEdges,
|
||||
loops: normalizedLoops,
|
||||
parallels: normalizedParallels,
|
||||
...(normalizedVariables !== undefined && { variables: normalizedVariables }),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const snapshotService = new SnapshotService()
|
||||
|
||||
@@ -1,34 +1,10 @@
|
||||
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { SYSTEM_SUBBLOCK_IDS, TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
|
||||
import {
|
||||
normalizedStringify,
|
||||
normalizeEdge,
|
||||
normalizeLoop,
|
||||
normalizeParallel,
|
||||
normalizeValue,
|
||||
normalizeVariables,
|
||||
sanitizeInputFormat,
|
||||
sanitizeTools,
|
||||
sanitizeVariable,
|
||||
sortEdges,
|
||||
} from './normalize'
|
||||
|
||||
/** Block with optional diff markers added by copilot */
|
||||
type BlockWithDiffMarkers = BlockState & {
|
||||
is_diff?: string
|
||||
field_diffs?: Record<string, unknown>
|
||||
}
|
||||
|
||||
/** SubBlock with optional diff marker */
|
||||
type SubBlockWithDiffMarker = {
|
||||
id: string
|
||||
type: string
|
||||
value: unknown
|
||||
is_diff?: string
|
||||
}
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
import { normalizedStringify, normalizeWorkflowState } from './normalize'
|
||||
|
||||
/**
|
||||
* Compare the current workflow state with the deployed state to detect meaningful changes
|
||||
* Compare the current workflow state with the deployed state to detect meaningful changes.
|
||||
* Uses the shared normalizeWorkflowState function to ensure consistency with snapshot hashing.
|
||||
*
|
||||
* @param currentState - The current workflow state
|
||||
* @param deployedState - The deployed workflow state
|
||||
* @returns True if there are meaningful changes, false if only position changes or no changes
|
||||
@@ -40,236 +16,106 @@ export function hasWorkflowChanged(
|
||||
// If no deployed state exists, then the workflow has changed
|
||||
if (!deployedState) return true
|
||||
|
||||
// 1. Compare edges (connections between blocks)
|
||||
const currentEdges = currentState.edges || []
|
||||
const deployedEdges = deployedState.edges || []
|
||||
const normalizedCurrent = normalizeWorkflowState(currentState)
|
||||
const normalizedDeployed = normalizeWorkflowState(deployedState)
|
||||
|
||||
const normalizedCurrentEdges = sortEdges(currentEdges.map(normalizeEdge))
|
||||
const normalizedDeployedEdges = sortEdges(deployedEdges.map(normalizeEdge))
|
||||
const currentStr = normalizedStringify(normalizedCurrent)
|
||||
const deployedStr = normalizedStringify(normalizedDeployed)
|
||||
|
||||
if (
|
||||
normalizedStringify(normalizedCurrentEdges) !== normalizedStringify(normalizedDeployedEdges)
|
||||
) {
|
||||
return true
|
||||
}
|
||||
if (currentStr !== deployedStr) {
|
||||
// Debug: Find what's different
|
||||
console.log('[hasWorkflowChanged] Detected differences:')
|
||||
|
||||
// 2. Compare blocks and their configurations
|
||||
const currentBlockIds = Object.keys(currentState.blocks || {}).sort()
|
||||
const deployedBlockIds = Object.keys(deployedState.blocks || {}).sort()
|
||||
|
||||
if (
|
||||
currentBlockIds.length !== deployedBlockIds.length ||
|
||||
normalizedStringify(currentBlockIds) !== normalizedStringify(deployedBlockIds)
|
||||
) {
|
||||
return true
|
||||
}
|
||||
|
||||
// 3. Build normalized representations of blocks for comparison
|
||||
const normalizedCurrentBlocks: Record<string, unknown> = {}
|
||||
const normalizedDeployedBlocks: Record<string, unknown> = {}
|
||||
|
||||
for (const blockId of currentBlockIds) {
|
||||
const currentBlock = currentState.blocks[blockId]
|
||||
const deployedBlock = deployedState.blocks[blockId]
|
||||
|
||||
// Destructure and exclude non-functional fields:
|
||||
// - position: visual positioning only
|
||||
// - subBlocks: handled separately below
|
||||
// - layout: contains measuredWidth/measuredHeight from autolayout
|
||||
// - height: block height measurement from autolayout
|
||||
// - outputs: derived from subBlocks (e.g., inputFormat), already compared via subBlocks
|
||||
// - is_diff, field_diffs: diff markers from copilot edits
|
||||
const currentBlockWithDiff = currentBlock as BlockWithDiffMarkers
|
||||
const deployedBlockWithDiff = deployedBlock as BlockWithDiffMarkers
|
||||
|
||||
const {
|
||||
position: _currentPos,
|
||||
subBlocks: currentSubBlocks = {},
|
||||
layout: _currentLayout,
|
||||
height: _currentHeight,
|
||||
outputs: _currentOutputs,
|
||||
is_diff: _currentIsDiff,
|
||||
field_diffs: _currentFieldDiffs,
|
||||
...currentRest
|
||||
} = currentBlockWithDiff
|
||||
|
||||
const {
|
||||
position: _deployedPos,
|
||||
subBlocks: deployedSubBlocks = {},
|
||||
layout: _deployedLayout,
|
||||
height: _deployedHeight,
|
||||
outputs: _deployedOutputs,
|
||||
is_diff: _deployedIsDiff,
|
||||
field_diffs: _deployedFieldDiffs,
|
||||
...deployedRest
|
||||
} = deployedBlockWithDiff
|
||||
|
||||
// Also exclude width/height from data object (container dimensions from autolayout)
|
||||
const {
|
||||
width: _currentDataWidth,
|
||||
height: _currentDataHeight,
|
||||
...currentDataRest
|
||||
} = currentRest.data || {}
|
||||
const {
|
||||
width: _deployedDataWidth,
|
||||
height: _deployedDataHeight,
|
||||
...deployedDataRest
|
||||
} = deployedRest.data || {}
|
||||
|
||||
normalizedCurrentBlocks[blockId] = {
|
||||
...currentRest,
|
||||
data: currentDataRest,
|
||||
subBlocks: undefined,
|
||||
// Compare edges
|
||||
if (
|
||||
normalizedStringify(normalizedCurrent.edges) !== normalizedStringify(normalizedDeployed.edges)
|
||||
) {
|
||||
console.log(' - Edges differ')
|
||||
console.log(' Current:', JSON.stringify(normalizedCurrent.edges, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(normalizedDeployed.edges, null, 2))
|
||||
}
|
||||
|
||||
normalizedDeployedBlocks[blockId] = {
|
||||
...deployedRest,
|
||||
data: deployedDataRest,
|
||||
subBlocks: undefined,
|
||||
}
|
||||
// Compare blocks
|
||||
const currentBlockIds = Object.keys(normalizedCurrent.blocks).sort()
|
||||
const deployedBlockIds = Object.keys(normalizedDeployed.blocks).sort()
|
||||
|
||||
// Get all subBlock IDs from both states, excluding runtime metadata and UI-only elements
|
||||
const allSubBlockIds = [
|
||||
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(deployedSubBlocks)]),
|
||||
]
|
||||
.filter(
|
||||
(id) => !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id) && !SYSTEM_SUBBLOCK_IDS.includes(id)
|
||||
)
|
||||
.sort()
|
||||
if (normalizedStringify(currentBlockIds) !== normalizedStringify(deployedBlockIds)) {
|
||||
console.log(' - Block IDs differ')
|
||||
console.log(' Current:', currentBlockIds)
|
||||
console.log(' Deployed:', deployedBlockIds)
|
||||
} else {
|
||||
for (const blockId of currentBlockIds) {
|
||||
const currentBlock = normalizedCurrent.blocks[blockId]
|
||||
const deployedBlock = normalizedDeployed.blocks[blockId]
|
||||
|
||||
// Normalize and compare each subBlock
|
||||
for (const subBlockId of allSubBlockIds) {
|
||||
// If the subBlock doesn't exist in either state, there's a difference
|
||||
if (!currentSubBlocks[subBlockId] || !deployedSubBlocks[subBlockId]) {
|
||||
return true
|
||||
}
|
||||
if (normalizedStringify(currentBlock) !== normalizedStringify(deployedBlock)) {
|
||||
console.log(` - Block "${blockId}" differs:`)
|
||||
|
||||
// Get values with special handling for null/undefined
|
||||
// Using unknown type since sanitization functions return different types
|
||||
let currentValue: unknown = currentSubBlocks[subBlockId].value ?? null
|
||||
let deployedValue: unknown = deployedSubBlocks[subBlockId].value ?? null
|
||||
// Compare subBlocks
|
||||
const currentSubBlockIds = Object.keys(currentBlock.subBlocks || {}).sort()
|
||||
const deployedSubBlockIds = Object.keys(deployedBlock.subBlocks || {}).sort()
|
||||
|
||||
if (subBlockId === 'tools' && Array.isArray(currentValue) && Array.isArray(deployedValue)) {
|
||||
currentValue = sanitizeTools(currentValue)
|
||||
deployedValue = sanitizeTools(deployedValue)
|
||||
}
|
||||
if (
|
||||
normalizedStringify(currentSubBlockIds) !== normalizedStringify(deployedSubBlockIds)
|
||||
) {
|
||||
console.log(' SubBlock IDs differ:')
|
||||
console.log(' Current:', currentSubBlockIds)
|
||||
console.log(' Deployed:', deployedSubBlockIds)
|
||||
} else {
|
||||
for (const subBlockId of currentSubBlockIds) {
|
||||
const currentSub = currentBlock.subBlocks[subBlockId]
|
||||
const deployedSub = deployedBlock.subBlocks[subBlockId]
|
||||
|
||||
if (
|
||||
subBlockId === 'inputFormat' &&
|
||||
Array.isArray(currentValue) &&
|
||||
Array.isArray(deployedValue)
|
||||
) {
|
||||
currentValue = sanitizeInputFormat(currentValue)
|
||||
deployedValue = sanitizeInputFormat(deployedValue)
|
||||
}
|
||||
if (normalizedStringify(currentSub) !== normalizedStringify(deployedSub)) {
|
||||
console.log(` SubBlock "${subBlockId}" differs:`)
|
||||
console.log(' Current:', JSON.stringify(currentSub, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(deployedSub, null, 2))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// For string values, compare directly to catch even small text changes
|
||||
if (typeof currentValue === 'string' && typeof deployedValue === 'string') {
|
||||
if (currentValue !== deployedValue) {
|
||||
return true
|
||||
}
|
||||
} else {
|
||||
// For other types, use normalized comparison
|
||||
const normalizedCurrentValue = normalizeValue(currentValue)
|
||||
const normalizedDeployedValue = normalizeValue(deployedValue)
|
||||
// Compare block properties (excluding subBlocks)
|
||||
const { subBlocks: _cs, ...currentBlockRest } = currentBlock
|
||||
const { subBlocks: _ds, ...deployedBlockRest } = deployedBlock
|
||||
|
||||
if (
|
||||
normalizedStringify(normalizedCurrentValue) !==
|
||||
normalizedStringify(normalizedDeployedValue)
|
||||
) {
|
||||
return true
|
||||
if (normalizedStringify(currentBlockRest) !== normalizedStringify(deployedBlockRest)) {
|
||||
console.log(' Block properties differ:')
|
||||
console.log(' Current:', JSON.stringify(currentBlockRest, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(deployedBlockRest, null, 2))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compare type and other properties (excluding diff markers and value)
|
||||
const currentSubBlockWithDiff = currentSubBlocks[subBlockId] as SubBlockWithDiffMarker
|
||||
const deployedSubBlockWithDiff = deployedSubBlocks[subBlockId] as SubBlockWithDiffMarker
|
||||
const { value: _cv, is_diff: _cd, ...currentSubBlockRest } = currentSubBlockWithDiff
|
||||
const { value: _dv, is_diff: _dd, ...deployedSubBlockRest } = deployedSubBlockWithDiff
|
||||
|
||||
if (normalizedStringify(currentSubBlockRest) !== normalizedStringify(deployedSubBlockRest)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
const blocksEqual =
|
||||
normalizedStringify(normalizedCurrentBlocks[blockId]) ===
|
||||
normalizedStringify(normalizedDeployedBlocks[blockId])
|
||||
|
||||
if (!blocksEqual) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Compare loops
|
||||
const currentLoops = currentState.loops || {}
|
||||
const deployedLoops = deployedState.loops || {}
|
||||
|
||||
const currentLoopIds = Object.keys(currentLoops).sort()
|
||||
const deployedLoopIds = Object.keys(deployedLoops).sort()
|
||||
|
||||
if (
|
||||
currentLoopIds.length !== deployedLoopIds.length ||
|
||||
normalizedStringify(currentLoopIds) !== normalizedStringify(deployedLoopIds)
|
||||
) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (const loopId of currentLoopIds) {
|
||||
const normalizedCurrentLoop = normalizeValue(normalizeLoop(currentLoops[loopId]))
|
||||
const normalizedDeployedLoop = normalizeValue(normalizeLoop(deployedLoops[loopId]))
|
||||
|
||||
// Compare loops
|
||||
if (
|
||||
normalizedStringify(normalizedCurrentLoop) !== normalizedStringify(normalizedDeployedLoop)
|
||||
normalizedStringify(normalizedCurrent.loops) !== normalizedStringify(normalizedDeployed.loops)
|
||||
) {
|
||||
return true
|
||||
console.log(' - Loops differ')
|
||||
console.log(' Current:', JSON.stringify(normalizedCurrent.loops, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(normalizedDeployed.loops, null, 2))
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Compare parallels
|
||||
const currentParallels = currentState.parallels || {}
|
||||
const deployedParallels = deployedState.parallels || {}
|
||||
|
||||
const currentParallelIds = Object.keys(currentParallels).sort()
|
||||
const deployedParallelIds = Object.keys(deployedParallels).sort()
|
||||
|
||||
if (
|
||||
currentParallelIds.length !== deployedParallelIds.length ||
|
||||
normalizedStringify(currentParallelIds) !== normalizedStringify(deployedParallelIds)
|
||||
) {
|
||||
return true
|
||||
}
|
||||
|
||||
for (const parallelId of currentParallelIds) {
|
||||
const normalizedCurrentParallel = normalizeValue(
|
||||
normalizeParallel(currentParallels[parallelId])
|
||||
)
|
||||
const normalizedDeployedParallel = normalizeValue(
|
||||
normalizeParallel(deployedParallels[parallelId])
|
||||
)
|
||||
|
||||
// Compare parallels
|
||||
if (
|
||||
normalizedStringify(normalizedCurrentParallel) !==
|
||||
normalizedStringify(normalizedDeployedParallel)
|
||||
normalizedStringify(normalizedCurrent.parallels) !==
|
||||
normalizedStringify(normalizedDeployed.parallels)
|
||||
) {
|
||||
return true
|
||||
console.log(' - Parallels differ')
|
||||
console.log(' Current:', JSON.stringify(normalizedCurrent.parallels, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(normalizedDeployed.parallels, null, 2))
|
||||
}
|
||||
}
|
||||
|
||||
// 6. Compare variables
|
||||
const currentVariables = normalizeVariables(currentState.variables)
|
||||
const deployedVariables = normalizeVariables(deployedState.variables)
|
||||
// Compare variables
|
||||
if (
|
||||
normalizedStringify(normalizedCurrent.variables) !==
|
||||
normalizedStringify(normalizedDeployed.variables)
|
||||
) {
|
||||
console.log(' - Variables differ')
|
||||
console.log(' Current:', JSON.stringify(normalizedCurrent.variables, null, 2))
|
||||
console.log(' Deployed:', JSON.stringify(normalizedDeployed.variables, null, 2))
|
||||
}
|
||||
|
||||
const normalizedCurrentVars = normalizeValue(
|
||||
Object.fromEntries(Object.entries(currentVariables).map(([id, v]) => [id, sanitizeVariable(v)]))
|
||||
)
|
||||
const normalizedDeployedVars = normalizeValue(
|
||||
Object.fromEntries(
|
||||
Object.entries(deployedVariables).map(([id, v]) => [id, sanitizeVariable(v)])
|
||||
)
|
||||
)
|
||||
|
||||
if (normalizedStringify(normalizedCurrentVars) !== normalizedStringify(normalizedDeployedVars)) {
|
||||
return true
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
export { hasWorkflowChanged } from './compare'
|
||||
export type { NormalizedWorkflowState } from './normalize'
|
||||
export {
|
||||
normalizedStringify,
|
||||
normalizeEdge,
|
||||
normalizeLoop,
|
||||
normalizeParallel,
|
||||
normalizeValue,
|
||||
normalizeVariables,
|
||||
normalizeWorkflowState,
|
||||
sanitizeInputFormat,
|
||||
sanitizeTools,
|
||||
sanitizeVariable,
|
||||
sortEdges,
|
||||
} from './normalize'
|
||||
|
||||
@@ -4,7 +4,14 @@
|
||||
*/
|
||||
|
||||
import type { Edge } from 'reactflow'
|
||||
import type { Loop, Parallel, Variable } from '@/stores/workflows/workflow/types'
|
||||
import type {
|
||||
BlockState,
|
||||
Loop,
|
||||
Parallel,
|
||||
Variable,
|
||||
WorkflowState,
|
||||
} from '@/stores/workflows/workflow/types'
|
||||
import { SYSTEM_SUBBLOCK_IDS, TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'
|
||||
|
||||
/**
|
||||
* Normalizes a value for consistent comparison by sorting object keys recursively
|
||||
@@ -220,3 +227,155 @@ export function sortEdges(
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/** Block with optional diff markers added by copilot */
|
||||
type BlockWithDiffMarkers = BlockState & {
|
||||
is_diff?: string
|
||||
field_diffs?: Record<string, unknown>
|
||||
}
|
||||
|
||||
/** SubBlock with optional diff marker */
|
||||
type SubBlockWithDiffMarker = {
|
||||
id: string
|
||||
type: string
|
||||
value: unknown
|
||||
is_diff?: string
|
||||
}
|
||||
|
||||
/** Normalized block structure for comparison */
|
||||
interface NormalizedBlock {
|
||||
[key: string]: unknown
|
||||
data: Record<string, unknown>
|
||||
subBlocks: Record<string, NormalizedSubBlock>
|
||||
}
|
||||
|
||||
/** Normalized subBlock structure */
|
||||
interface NormalizedSubBlock {
|
||||
[key: string]: unknown
|
||||
value: unknown
|
||||
}
|
||||
|
||||
/** Normalized workflow state structure */
|
||||
export interface NormalizedWorkflowState {
|
||||
blocks: Record<string, NormalizedBlock>
|
||||
edges: Array<{
|
||||
source: string
|
||||
sourceHandle?: string | null
|
||||
target: string
|
||||
targetHandle?: string | null
|
||||
}>
|
||||
loops: Record<string, unknown>
|
||||
parallels: Record<string, unknown>
|
||||
variables: unknown
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalizes a workflow state for comparison or hashing.
|
||||
* Excludes non-functional fields (position, layout, height, outputs, diff markers)
|
||||
* and system/trigger runtime subBlocks.
|
||||
*
|
||||
* @param state - The workflow state to normalize
|
||||
* @returns A normalized workflow state suitable for comparison or hashing
|
||||
*/
|
||||
export function normalizeWorkflowState(state: WorkflowState): NormalizedWorkflowState {
|
||||
// 1. Normalize and sort edges (connection-relevant fields only)
|
||||
const normalizedEdges = sortEdges((state.edges || []).map(normalizeEdge))
|
||||
|
||||
// 2. Normalize blocks
|
||||
const normalizedBlocks: Record<string, NormalizedBlock> = {}
|
||||
|
||||
for (const [blockId, block] of Object.entries(state.blocks || {})) {
|
||||
const blockWithDiff = block as BlockWithDiffMarkers
|
||||
|
||||
// Exclude non-functional fields:
|
||||
// - position: visual positioning only
|
||||
// - layout: contains measuredWidth/measuredHeight from autolayout
|
||||
// - height: block height measurement from autolayout
|
||||
// - outputs: derived from subBlocks, already compared via subBlocks
|
||||
// - is_diff, field_diffs: diff markers from copilot edits
|
||||
// - subBlocks: handled separately
|
||||
const {
|
||||
position: _position,
|
||||
subBlocks: blockSubBlocks = {},
|
||||
layout: _layout,
|
||||
height: _height,
|
||||
outputs: _outputs,
|
||||
is_diff: _isDiff,
|
||||
field_diffs: _fieldDiffs,
|
||||
...blockRest
|
||||
} = blockWithDiff
|
||||
|
||||
// Exclude from data object:
|
||||
// - width/height: container dimensions from autolayout
|
||||
// - nodes: subflow node membership (derived/runtime for parallel/loop blocks)
|
||||
// - distribution: parallel distribution (derived/runtime)
|
||||
const {
|
||||
width: _dataWidth,
|
||||
height: _dataHeight,
|
||||
nodes: _dataNodes,
|
||||
distribution: _dataDistribution,
|
||||
...dataRest
|
||||
} = (blockRest.data || {}) as Record<string, unknown>
|
||||
|
||||
// Filter and normalize subBlocks (exclude system/trigger runtime subBlocks)
|
||||
const normalizedSubBlocks: Record<string, NormalizedSubBlock> = {}
|
||||
const subBlockIds = Object.keys(blockSubBlocks)
|
||||
.filter(
|
||||
(id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)
|
||||
)
|
||||
.sort()
|
||||
|
||||
for (const subBlockId of subBlockIds) {
|
||||
const subBlock = blockSubBlocks[subBlockId] as SubBlockWithDiffMarker
|
||||
let value: unknown = subBlock.value ?? null
|
||||
|
||||
// Sanitize UI-only fields from tools and inputFormat
|
||||
if (subBlockId === 'tools' && Array.isArray(value)) {
|
||||
value = sanitizeTools(value)
|
||||
}
|
||||
if (subBlockId === 'inputFormat' && Array.isArray(value)) {
|
||||
value = sanitizeInputFormat(value)
|
||||
}
|
||||
|
||||
// Exclude diff markers from subBlock
|
||||
const { value: _v, is_diff: _sd, ...subBlockRest } = subBlock
|
||||
|
||||
normalizedSubBlocks[subBlockId] = {
|
||||
...subBlockRest,
|
||||
value: normalizeValue(value),
|
||||
}
|
||||
}
|
||||
|
||||
normalizedBlocks[blockId] = {
|
||||
...blockRest,
|
||||
data: dataRest,
|
||||
subBlocks: normalizedSubBlocks,
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Normalize loops using specialized normalizeLoop (extracts only type-relevant fields)
|
||||
const normalizedLoops: Record<string, unknown> = {}
|
||||
for (const [loopId, loop] of Object.entries(state.loops || {})) {
|
||||
normalizedLoops[loopId] = normalizeValue(normalizeLoop(loop))
|
||||
}
|
||||
|
||||
// 4. Normalize parallels using specialized normalizeParallel
|
||||
const normalizedParallels: Record<string, unknown> = {}
|
||||
for (const [parallelId, parallel] of Object.entries(state.parallels || {})) {
|
||||
normalizedParallels[parallelId] = normalizeValue(normalizeParallel(parallel))
|
||||
}
|
||||
|
||||
// 5. Normalize variables (remove UI-only validationError field)
|
||||
const variables = normalizeVariables(state.variables)
|
||||
const normalizedVariablesObj = normalizeValue(
|
||||
Object.fromEntries(Object.entries(variables).map(([id, v]) => [id, sanitizeVariable(v)]))
|
||||
)
|
||||
|
||||
return {
|
||||
blocks: normalizedBlocks,
|
||||
edges: normalizedEdges,
|
||||
loops: normalizedLoops,
|
||||
parallels: normalizedParallels,
|
||||
variables: normalizedVariablesObj,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,9 +23,11 @@ import type {
|
||||
ContextExtensions,
|
||||
ExecutionCallbacks,
|
||||
IterationContext,
|
||||
SerializableExecutionState,
|
||||
} from '@/executor/execution/types'
|
||||
import type { ExecutionResult, NormalizedBlockOutput } from '@/executor/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
import { buildParallelSentinelEndId, buildSentinelEndId } from '@/executor/utils/subflow-utils'
|
||||
import { Serializer } from '@/serializer'
|
||||
|
||||
const logger = createLogger('ExecutionCore')
|
||||
@@ -40,6 +42,12 @@ export interface ExecuteWorkflowCoreOptions {
|
||||
abortSignal?: AbortSignal
|
||||
includeFileBase64?: boolean
|
||||
base64MaxBytes?: number
|
||||
stopAfterBlockId?: string
|
||||
/** Run-from-block mode: execute starting from a specific block using cached upstream outputs */
|
||||
runFromBlock?: {
|
||||
startBlockId: string
|
||||
sourceSnapshot: SerializableExecutionState
|
||||
}
|
||||
}
|
||||
|
||||
function parseVariableValueByType(value: unknown, type: string): unknown {
|
||||
@@ -114,6 +122,8 @@ export async function executeWorkflowCore(
|
||||
abortSignal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
} = options
|
||||
const { metadata, workflow, input, workflowVariables, selectedOutputs } = snapshot
|
||||
const { requestId, workflowId, userId, triggerType, executionId, triggerBlockId, useDraftState } =
|
||||
@@ -246,6 +256,16 @@ export async function executeWorkflowCore(
|
||||
|
||||
processedInput = input || {}
|
||||
|
||||
// Resolve stopAfterBlockId for loop/parallel containers to their sentinel-end IDs
|
||||
let resolvedStopAfterBlockId = stopAfterBlockId
|
||||
if (stopAfterBlockId) {
|
||||
if (serializedWorkflow.loops?.[stopAfterBlockId]) {
|
||||
resolvedStopAfterBlockId = buildSentinelEndId(stopAfterBlockId)
|
||||
} else if (serializedWorkflow.parallels?.[stopAfterBlockId]) {
|
||||
resolvedStopAfterBlockId = buildParallelSentinelEndId(stopAfterBlockId)
|
||||
}
|
||||
}
|
||||
|
||||
// Create and execute workflow with callbacks
|
||||
if (resumeFromSnapshot) {
|
||||
logger.info(`[${requestId}] Resume execution detected`, {
|
||||
@@ -296,6 +316,7 @@ export async function executeWorkflowCore(
|
||||
abortSignal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId: resolvedStopAfterBlockId,
|
||||
}
|
||||
|
||||
const executorInstance = new Executor({
|
||||
@@ -318,10 +339,13 @@ export async function executeWorkflowCore(
|
||||
}
|
||||
}
|
||||
|
||||
const result = (await executorInstance.execute(
|
||||
workflowId,
|
||||
resolvedTriggerBlockId
|
||||
)) as ExecutionResult
|
||||
const result = runFromBlock
|
||||
? ((await executorInstance.executeFromBlock(
|
||||
workflowId,
|
||||
runFromBlock.startBlockId,
|
||||
runFromBlock.sourceSnapshot
|
||||
)) as ExecutionResult)
|
||||
: ((await executorInstance.execute(workflowId, resolvedTriggerBlockId)) as ExecutionResult)
|
||||
|
||||
// Build trace spans for logging from the full execution result
|
||||
const { traceSpans, totalDuration } = buildTraceSpans(result)
|
||||
|
||||
@@ -180,3 +180,140 @@ export function formatSSEEvent(event: ExecutionEvent): string {
|
||||
export function encodeSSEEvent(event: ExecutionEvent): Uint8Array {
|
||||
return new TextEncoder().encode(formatSSEEvent(event))
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for creating SSE execution callbacks
|
||||
*/
|
||||
export interface SSECallbackOptions {
|
||||
executionId: string
|
||||
workflowId: string
|
||||
controller: ReadableStreamDefaultController<Uint8Array>
|
||||
isStreamClosed: () => boolean
|
||||
setStreamClosed: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates SSE callbacks for workflow execution streaming
|
||||
*/
|
||||
export function createSSECallbacks(options: SSECallbackOptions) {
|
||||
const { executionId, workflowId, controller, isStreamClosed, setStreamClosed } = options
|
||||
|
||||
const sendEvent = (event: ExecutionEvent) => {
|
||||
if (isStreamClosed()) return
|
||||
try {
|
||||
controller.enqueue(encodeSSEEvent(event))
|
||||
} catch {
|
||||
setStreamClosed()
|
||||
}
|
||||
}
|
||||
|
||||
const onBlockStart = async (
|
||||
blockId: string,
|
||||
blockName: string,
|
||||
blockType: string,
|
||||
iterationContext?: { iterationCurrent: number; iterationTotal: number; iterationType: string }
|
||||
) => {
|
||||
sendEvent({
|
||||
type: 'block:started',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
...(iterationContext && {
|
||||
iterationCurrent: iterationContext.iterationCurrent,
|
||||
iterationTotal: iterationContext.iterationTotal,
|
||||
iterationType: iterationContext.iterationType as any,
|
||||
}),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const onBlockComplete = async (
|
||||
blockId: string,
|
||||
blockName: string,
|
||||
blockType: string,
|
||||
callbackData: { input?: unknown; output: any; executionTime: number },
|
||||
iterationContext?: { iterationCurrent: number; iterationTotal: number; iterationType: string }
|
||||
) => {
|
||||
const hasError = callbackData.output?.error
|
||||
const iterationData = iterationContext
|
||||
? {
|
||||
iterationCurrent: iterationContext.iterationCurrent,
|
||||
iterationTotal: iterationContext.iterationTotal,
|
||||
iterationType: iterationContext.iterationType as any,
|
||||
}
|
||||
: {}
|
||||
|
||||
if (hasError) {
|
||||
sendEvent({
|
||||
type: 'block:error',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
input: callbackData.input,
|
||||
error: callbackData.output.error,
|
||||
durationMs: callbackData.executionTime || 0,
|
||||
...iterationData,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
sendEvent({
|
||||
type: 'block:completed',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
blockId,
|
||||
blockName,
|
||||
blockType,
|
||||
input: callbackData.input,
|
||||
output: callbackData.output,
|
||||
durationMs: callbackData.executionTime || 0,
|
||||
...iterationData,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const onStream = async (streamingExecution: unknown) => {
|
||||
const streamingExec = streamingExecution as { stream: ReadableStream; execution: any }
|
||||
const blockId = streamingExec.execution?.blockId
|
||||
const reader = streamingExec.stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
const chunk = decoder.decode(value, { stream: true })
|
||||
sendEvent({
|
||||
type: 'stream:chunk',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { blockId, chunk },
|
||||
})
|
||||
}
|
||||
sendEvent({
|
||||
type: 'stream:done',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { blockId },
|
||||
})
|
||||
} finally {
|
||||
try {
|
||||
reader.releaseLock()
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
|
||||
return { sendEvent, onBlockStart, onBlockComplete, onStream }
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ export const PANEL_WIDTH = {
|
||||
|
||||
/** Terminal height constraints */
|
||||
export const TERMINAL_HEIGHT = {
|
||||
DEFAULT: 206,
|
||||
DEFAULT: 155,
|
||||
MIN: 30,
|
||||
/** Maximum is 70% of viewport, enforced dynamically */
|
||||
MAX_PERCENTAGE: 0.7,
|
||||
@@ -58,9 +58,6 @@ export const EDITOR_CONNECTIONS_HEIGHT = {
|
||||
|
||||
/** Output panel (terminal execution results) width constraints */
|
||||
export const OUTPUT_PANEL_WIDTH = {
|
||||
DEFAULT: 560,
|
||||
MIN: 280,
|
||||
DEFAULT: 440,
|
||||
MIN: 440,
|
||||
} as const
|
||||
|
||||
/** Terminal block column width - minimum width for the logs column */
|
||||
export const TERMINAL_BLOCK_COLUMN_WIDTH = 240 as const
|
||||
|
||||
@@ -35,4 +35,23 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
|
||||
},
|
||||
clearRunPath: () => set({ lastRunPath: new Map(), lastRunEdges: new Map() }),
|
||||
reset: () => set(initialState),
|
||||
|
||||
setLastExecutionSnapshot: (workflowId, snapshot) => {
|
||||
const { lastExecutionSnapshots } = get()
|
||||
const newSnapshots = new Map(lastExecutionSnapshots)
|
||||
newSnapshots.set(workflowId, snapshot)
|
||||
set({ lastExecutionSnapshots: newSnapshots })
|
||||
},
|
||||
|
||||
getLastExecutionSnapshot: (workflowId) => {
|
||||
const { lastExecutionSnapshots } = get()
|
||||
return lastExecutionSnapshots.get(workflowId)
|
||||
},
|
||||
|
||||
clearLastExecutionSnapshot: (workflowId) => {
|
||||
const { lastExecutionSnapshots } = get()
|
||||
const newSnapshots = new Map(lastExecutionSnapshots)
|
||||
newSnapshots.delete(workflowId)
|
||||
set({ lastExecutionSnapshots: newSnapshots })
|
||||
},
|
||||
}))
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { Executor } from '@/executor'
|
||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||
import type { ExecutionContext } from '@/executor/types'
|
||||
|
||||
/**
|
||||
@@ -18,16 +19,9 @@ export interface ExecutionState {
|
||||
pendingBlocks: string[]
|
||||
executor: Executor | null
|
||||
debugContext: ExecutionContext | null
|
||||
/**
|
||||
* Tracks blocks from the last execution run and their success/error status.
|
||||
* Cleared when a new run starts. Used to show run path indicators (rings on blocks).
|
||||
*/
|
||||
lastRunPath: Map<string, BlockRunStatus>
|
||||
/**
|
||||
* Tracks edges from the last execution run and their success/error status.
|
||||
* Cleared when a new run starts. Used to show run path indicators on edges.
|
||||
*/
|
||||
lastRunEdges: Map<string, EdgeRunStatus>
|
||||
lastExecutionSnapshots: Map<string, SerializableExecutionState>
|
||||
}
|
||||
|
||||
export interface ExecutionActions {
|
||||
@@ -41,6 +35,9 @@ export interface ExecutionActions {
|
||||
setEdgeRunStatus: (edgeId: string, status: EdgeRunStatus) => void
|
||||
clearRunPath: () => void
|
||||
reset: () => void
|
||||
setLastExecutionSnapshot: (workflowId: string, snapshot: SerializableExecutionState) => void
|
||||
getLastExecutionSnapshot: (workflowId: string) => SerializableExecutionState | undefined
|
||||
clearLastExecutionSnapshot: (workflowId: string) => void
|
||||
}
|
||||
|
||||
export const initialState: ExecutionState = {
|
||||
@@ -52,4 +49,5 @@ export const initialState: ExecutionState = {
|
||||
debugContext: null,
|
||||
lastRunPath: new Map(),
|
||||
lastRunEdges: new Map(),
|
||||
lastExecutionSnapshots: new Map(),
|
||||
}
|
||||
|
||||
@@ -129,7 +129,7 @@ export const useSearchModalStore = create<SearchModalState>()(
|
||||
.filter((op) => allowedBlockTypes.has(op.blockType))
|
||||
.map((op) => ({
|
||||
id: op.id,
|
||||
name: `${op.serviceName}: ${op.operationName}`,
|
||||
name: op.operationName,
|
||||
searchValue: `${op.serviceName} ${op.operationName}`,
|
||||
icon: op.icon,
|
||||
bgColor: op.bgColor,
|
||||
|
||||
@@ -339,49 +339,12 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
|
||||
: update.input
|
||||
}
|
||||
|
||||
if (update.isRunning !== undefined) {
|
||||
updatedEntry.isRunning = update.isRunning
|
||||
}
|
||||
|
||||
if (update.isCanceled !== undefined) {
|
||||
updatedEntry.isCanceled = update.isCanceled
|
||||
}
|
||||
|
||||
if (update.iterationCurrent !== undefined) {
|
||||
updatedEntry.iterationCurrent = update.iterationCurrent
|
||||
}
|
||||
|
||||
if (update.iterationTotal !== undefined) {
|
||||
updatedEntry.iterationTotal = update.iterationTotal
|
||||
}
|
||||
|
||||
if (update.iterationType !== undefined) {
|
||||
updatedEntry.iterationType = update.iterationType
|
||||
}
|
||||
|
||||
return updatedEntry
|
||||
})
|
||||
|
||||
return { entries: updatedEntries }
|
||||
})
|
||||
},
|
||||
|
||||
cancelRunningEntries: (workflowId: string) => {
|
||||
set((state) => {
|
||||
const updatedEntries = state.entries.map((entry) => {
|
||||
if (entry.workflowId === workflowId && entry.isRunning) {
|
||||
return {
|
||||
...entry,
|
||||
isRunning: false,
|
||||
isCanceled: true,
|
||||
endedAt: new Date().toISOString(),
|
||||
}
|
||||
}
|
||||
return entry
|
||||
})
|
||||
return { entries: updatedEntries }
|
||||
})
|
||||
},
|
||||
}),
|
||||
{
|
||||
name: 'terminal-console-store',
|
||||
|
||||
@@ -20,10 +20,6 @@ export interface ConsoleEntry {
|
||||
iterationCurrent?: number
|
||||
iterationTotal?: number
|
||||
iterationType?: SubflowType
|
||||
/** Whether this block is currently running */
|
||||
isRunning?: boolean
|
||||
/** Whether this block execution was canceled */
|
||||
isCanceled?: boolean
|
||||
}
|
||||
|
||||
export interface ConsoleUpdate {
|
||||
@@ -36,14 +32,6 @@ export interface ConsoleUpdate {
|
||||
endedAt?: string
|
||||
durationMs?: number
|
||||
input?: any
|
||||
/** Whether this block is currently running */
|
||||
isRunning?: boolean
|
||||
/** Whether this block execution was canceled */
|
||||
isCanceled?: boolean
|
||||
/** Iteration context for subflow blocks */
|
||||
iterationCurrent?: number
|
||||
iterationTotal?: number
|
||||
iterationType?: SubflowType
|
||||
}
|
||||
|
||||
export interface ConsoleStore {
|
||||
@@ -55,7 +43,6 @@ export interface ConsoleStore {
|
||||
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
|
||||
toggleConsole: () => void
|
||||
updateConsole: (blockId: string, update: string | ConsoleUpdate, executionId?: string) => void
|
||||
cancelRunningEntries: (workflowId: string) => void
|
||||
_hasHydrated: boolean
|
||||
setHasHydrated: (hasHydrated: boolean) => void
|
||||
}
|
||||
|
||||
@@ -69,15 +69,6 @@ export const useTerminalStore = create<TerminalState>()(
|
||||
setWrapText: (wrap) => {
|
||||
set({ wrapText: wrap })
|
||||
},
|
||||
structuredView: true,
|
||||
/**
|
||||
* Enables or disables structured view mode in the output panel.
|
||||
*
|
||||
* @param structured - Whether output should be displayed as nested blocks.
|
||||
*/
|
||||
setStructuredView: (structured) => {
|
||||
set({ structuredView: structured })
|
||||
},
|
||||
/**
|
||||
* Indicates whether the terminal store has finished client-side hydration.
|
||||
*/
|
||||
|
||||
@@ -19,8 +19,6 @@ export interface TerminalState {
|
||||
setOpenOnRun: (open: boolean) => void
|
||||
wrapText: boolean
|
||||
setWrapText: (wrap: boolean) => void
|
||||
structuredView: boolean
|
||||
setStructuredView: (structured: boolean) => void
|
||||
/**
|
||||
* Indicates whether the terminal is currently being resized via mouse drag.
|
||||
*
|
||||
|
||||
@@ -10,6 +10,7 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
|
||||
'webhookUrlDisplay', // Webhook URL display
|
||||
'samplePayload', // Example payload display
|
||||
'setupScript', // Setup script code (e.g., Apps Script)
|
||||
'scheduleInfo', // Schedule status display (next run, last run)
|
||||
]
|
||||
|
||||
/**
|
||||
|
||||
Reference in New Issue
Block a user