feat(hitl): add human in the loop block (#1832)

* fix(billing): should allow restoring subscription (#1728)

* fix(already-cancelled-sub): UI should allow restoring subscription

* restore functionality fixed

* fix

* Add pause resume block

* Add db schema

* Initial test passes

* Tests pass

* Execution pauses

* Snapshot serializer

* Ui checkpoint

* Works 1

* Pause resume simple v1

* Hitl block works in parallel branches without timing overlap

* Pending status to logs

* Pause resume ui link

* Big context consolidation

* HITL works in loops

* Fix parallels

* Reference blocks properly

* Fix tag dropdown and start block resolution

* Filter console logs for hitl block

* Fix notifs

* Fix logs page

* Fix logs page again

* Fix

* Checkpoint

* Cleanup v1

* Refactor v2

* Refactor v3

* Refactor v4

* Refactor v5

* Resume page

* Fix variables in loops

* Fix var res bugs

* Ui changes

* Approval block

* Hitl works e2e v1

* Fix tests

* Row level lock

---------

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
This commit is contained in:
Siddharth Ganesan
2025-11-06 15:59:28 -08:00
committed by GitHub
parent f9ce65eddf
commit 742d59f54d
90 changed files with 13498 additions and 1128 deletions

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { pausedExecutions, permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -68,6 +68,9 @@ export async function GET(request: NextRequest) {
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
pausedStatus: pausedExecutions.status,
pausedTotalPauseCount: pausedExecutions.totalPauseCount,
pausedResumedCount: pausedExecutions.resumedCount,
}
: {
// Basic mode - exclude large fields for better performance
@@ -92,11 +95,18 @@ export async function GET(request: NextRequest) {
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
pausedStatus: pausedExecutions.status,
pausedTotalPauseCount: pausedExecutions.totalPauseCount,
pausedResumedCount: pausedExecutions.resumedCount,
}
const baseQuery = db
.select(selectColumns)
.from(workflowExecutionLogs)
.leftJoin(
pausedExecutions,
eq(pausedExecutions.executionId, workflowExecutionLogs.executionId)
)
.innerJoin(
workflow,
and(
@@ -186,6 +196,10 @@ export async function GET(request: NextRequest) {
const countQuery = db
.select({ count: sql<number>`count(*)` })
.from(workflowExecutionLogs)
.leftJoin(
pausedExecutions,
eq(pausedExecutions.executionId, workflowExecutionLogs.executionId)
)
.innerJoin(
workflow,
and(
@@ -340,13 +354,18 @@ export async function GET(request: NextRequest) {
return {
id: log.id,
workflowId: log.workflowId,
executionId: params.details === 'full' ? log.executionId : undefined,
executionId: log.executionId,
level: log.level,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary,
pauseSummary: {
status: log.pausedStatus ?? null,
total: log.pausedTotalPauseCount ?? 0,
resumed: log.pausedResumedCount ?? 0,
},
executionData:
params.details === 'full'
? {
@@ -361,6 +380,10 @@ export async function GET(request: NextRequest) {
params.details === 'full'
? (costSummary as any)
: { total: (costSummary as any)?.total || 0 },
hasPendingPause:
(Number(log.pausedTotalPauseCount ?? 0) > 0 &&
Number(log.pausedResumedCount ?? 0) < Number(log.pausedTotalPauseCount ?? 0)) ||
(log.pausedStatus && log.pausedStatus !== 'fully_resumed'),
}
})
return NextResponse.json(

View File

@@ -0,0 +1,116 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
const logger = createLogger('WorkflowResumeAPI')
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
/**
 * POST /api/workflows/[workflowId]/executions/[executionId]/contexts/[contextId]
 *
 * Requests a resume of a paused execution at a specific pause context.
 * Either starts the resume immediately or queues it behind in-flight resumes.
 */
export async function POST(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ workflowId: string; executionId: string; contextId: string }>
  }
) {
  const { workflowId, executionId, contextId } = await params
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }
  const workflow = access.workflow!
  // Body is optional; a missing/invalid JSON body resumes with empty input.
  let payload: { input?: unknown } | null = null
  try {
    payload = await request.json()
  } catch {
    payload = null
  }
  // Accept either { input: ... } or a bare object as the resume input.
  const resumeInput = payload?.input ?? payload ?? {}
  const userId = workflow.userId ?? ''
  try {
    const enqueueResult = await PauseResumeManager.enqueueOrStartResume({
      executionId,
      contextId,
      resumeInput,
      userId,
    })
    if (enqueueResult.status === 'queued') {
      return NextResponse.json({
        status: 'queued',
        executionId: enqueueResult.resumeExecutionId,
        queuePosition: enqueueResult.queuePosition,
        message: 'Resume queued. It will run after current resumes finish.',
      })
    }
    // Fire-and-forget: the resume runs in the background while we ACK the caller.
    void PauseResumeManager.startResumeExecution({
      resumeEntryId: enqueueResult.resumeEntryId,
      resumeExecutionId: enqueueResult.resumeExecutionId,
      pausedExecution: enqueueResult.pausedExecution,
      contextId: enqueueResult.contextId,
      resumeInput: enqueueResult.resumeInput,
      userId: enqueueResult.userId,
    }).catch((error) => {
      logger.error('Failed to start resume execution', {
        workflowId,
        parentExecutionId: executionId,
        resumeExecutionId: enqueueResult.resumeExecutionId,
        error,
      })
    })
    return NextResponse.json({
      status: 'started',
      executionId: enqueueResult.resumeExecutionId,
      message: 'Resume execution started.',
    })
  } catch (error: unknown) {
    logger.error('Resume request failed', {
      workflowId,
      executionId,
      contextId,
      error,
    })
    const message =
      (error instanceof Error && error.message) || 'Failed to queue resume request'
    return NextResponse.json({ error: message }, { status: 400 })
  }
}
export async function GET(
request: NextRequest,
{
params,
}: {
params: Promise<{ workflowId: string; executionId: string; contextId: string }>
}
) {
const { workflowId, executionId, contextId } = await params
const access = await validateWorkflowAccess(request, workflowId, false)
if (access.error) {
return NextResponse.json({ error: access.error.message }, { status: access.error.status })
}
const detail = await PauseResumeManager.getPauseContextDetail({
workflowId,
executionId,
contextId,
})
if (!detail) {
return NextResponse.json({ error: 'Pause context not found' }, { status: 404 })
}
return NextResponse.json(detail)
}

View File

@@ -0,0 +1,48 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
const logger = createLogger('WorkflowResumeExecutionAPI')
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
/**
 * Loads the paused-execution detail for one execution of a workflow.
 * Responds 404 when no paused execution exists, 500 on lookup failure.
 */
export async function GET(
  request: NextRequest,
  context: {
    params: Promise<{ workflowId: string; executionId: string }>
  }
) {
  const { workflowId, executionId } = await context.params
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    const { message, status } = access.error
    return NextResponse.json({ error: message }, { status })
  }
  try {
    const pausedDetail = await PauseResumeManager.getPausedExecutionDetail({
      workflowId,
      executionId,
    })
    return pausedDetail
      ? NextResponse.json(pausedDetail)
      : NextResponse.json({ error: 'Paused execution not found' }, { status: 404 })
  } catch (error: any) {
    logger.error('Failed to load paused execution detail', {
      workflowId,
      executionId,
      error,
    })
    return NextResponse.json(
      { error: error?.message || 'Failed to load paused execution detail' },
      { status: 500 }
    )
  }
}

View File

@@ -12,6 +12,7 @@ import {
} from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { StreamingExecution } from '@/executor/types'
@@ -135,6 +136,24 @@ export async function executeWorkflow(
loggingSession,
})
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
if (streamConfig?.skipLoggingComplete) {
return {
...result,
@@ -605,6 +624,24 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
loggingSession,
})
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
if (result.error === 'Workflow execution was cancelled') {
logger.info(`[${requestId}] Workflow execution was cancelled`)
sendEvent({

View File

@@ -0,0 +1,34 @@
import { type NextRequest, NextResponse } from 'next/server'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
/**
 * GET paused-execution detail for a workflow execution.
 *
 * `params` is typed as a Promise for consistency with the other route
 * handlers in this commit (Next.js 15 async route params); awaiting also
 * tolerates a plain object on older Next.js versions.
 */
export async function GET(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ id: string; executionId: string }>
  }
) {
  const { id: workflowId, executionId } = await params
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }
  const detail = await PauseResumeManager.getPausedExecutionDetail({
    workflowId,
    executionId,
  })
  if (!detail) {
    return NextResponse.json({ error: 'Paused execution not found' }, { status: 404 })
  }
  return NextResponse.json(detail)
}

View File

@@ -0,0 +1,31 @@
import { type NextRequest, NextResponse } from 'next/server'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
/**
 * Lists paused executions for a workflow, optionally filtered by `?status=`.
 *
 * `params` is typed as a Promise for consistency with the other route
 * handlers in this commit (Next.js 15 async route params); awaiting also
 * tolerates a plain object on older Next.js versions.
 */
export async function GET(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ id: string }>
  }
) {
  const { id: workflowId } = await params
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }
  // Empty string means "no filter" — normalize to undefined.
  const statusFilter = request.nextUrl.searchParams.get('status') || undefined
  const pausedExecutions = await PauseResumeManager.listPausedExecutions({
    workflowId,
    status: statusFilter,
  })
  return NextResponse.json({ pausedExecutions })
}

View File

@@ -0,0 +1,15 @@
import { redirect } from 'next/navigation'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
interface PageParams {
  workflowId: string
  executionId: string
  contextId: string
}

/**
 * Context-scoped resume deep link: forwards to the execution-level resume
 * page, carrying the pause context id as a query parameter.
 */
export default async function ResumePage({ params }: { params: Promise<PageParams> }) {
  const { workflowId, executionId, contextId } = await params
  // Encode so context ids containing reserved characters survive the query string.
  redirect(`/resume/${workflowId}/${executionId}?contextId=${encodeURIComponent(contextId)}`)
}

View File

@@ -0,0 +1,40 @@
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import ResumeExecutionPage from './resume-page-client'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
interface PageParams {
  workflowId: string
  executionId: string
}

/**
 * Server wrapper for the resume console: loads the paused-execution detail
 * and hands a serializable snapshot of it to the client page component.
 */
export default async function ResumeExecutionPageWrapper({
  params,
  searchParams,
}: {
  params: Promise<PageParams>
  searchParams: Promise<Record<string, string | string[] | undefined>>
}) {
  const [routeParams, query] = await Promise.all([params, searchParams])
  const { workflowId, executionId } = routeParams
  const rawContextId = query?.contextId
  const initialContextId = Array.isArray(rawContextId) ? rawContextId[0] : rawContextId
  const executionDetail = await PauseResumeManager.getPausedExecutionDetail({
    workflowId,
    executionId,
  })
  // JSON round-trip strips non-serializable values before crossing the RSC boundary.
  const serializedDetail = executionDetail
    ? JSON.parse(JSON.stringify(executionDetail))
    : null
  return (
    <ResumeExecutionPage
      params={routeParams}
      initialExecutionDetail={serializedDetail}
      initialContextId={initialContextId}
    />
  )
}

File diff suppressed because it is too large. Load Diff

View File

@@ -1,5 +1,6 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import { Info, Loader2 } from 'lucide-react'
import { ArrowUpRight, Info, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useRouter } from 'next/navigation'
import { cn } from '@/lib/utils'
import LineChart, {
@@ -27,6 +28,7 @@ export interface ExecutionLogItem {
} | null
workflowName?: string
workflowColor?: string
hasPendingPause?: boolean
}
export interface WorkflowDetailsData {
@@ -263,7 +265,7 @@ export function WorkflowDetails({
<div className='w-full overflow-x-auto'>
<div>
<div className='border-border border-b'>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] gap-2 px-2 pb-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px_40px] gap-2 px-2 pb-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div className='font-[460] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
@@ -285,6 +287,9 @@ export function WorkflowDetails({
<div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Duration
</div>
<div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Resume
</div>
</div>
</div>
</div>
@@ -317,6 +322,12 @@ export function WorkflowDetails({
const outputsStr = log.outputs ? JSON.stringify(log.outputs) : '—'
const errorStr = log.errorMessage || ''
const isExpanded = expandedRowId === log.id
const baseLevel = (log.level || 'info').toLowerCase()
const isPending = log.hasPendingPause === true
const isError = baseLevel === 'error'
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return (
<div
@@ -329,7 +340,7 @@ export function WorkflowDetails({
setExpandedRowId((prev) => (prev === log.id ? null : log.id))
}
>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] items-center gap-2 px-2 py-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px_40px] items-center gap-2 px-2 py-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div>
<div className='text-[13px]'>
<span className='font-sm text-muted-foreground'>
@@ -348,12 +359,14 @@ export function WorkflowDetails({
<div
className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-[400] text-xs transition-all duration-200 lg:px-[8px]',
log.level === 'error'
isError
? 'bg-red-500 text-white'
: 'bg-secondary text-card-foreground'
: isPending
? 'bg-amber-300 text-amber-900 dark:bg-amber-500/90 dark:text-black'
: 'bg-secondary text-card-foreground'
)}
>
{log.level}
{statusLabel}
</div>
</div>
@@ -423,6 +436,20 @@ export function WorkflowDetails({
{typeof log.duration === 'number' ? `${log.duration}ms` : '—'}
</div>
</div>
<div className='flex justify-end'>
{isPending && log.executionId ? (
<Link
href={`/resume/${expandedWorkflowId}/${log.executionId}`}
className='inline-flex h-7 w-7 items-center justify-center rounded-md border border-primary/60 border-dashed text-primary hover:bg-primary/10'
aria-label='Open resume console'
>
<ArrowUpRight className='h-4 w-4' />
</Link>
) : (
<span className='h-7 w-7' />
)}
</div>
</div>
{isExpanded && (
<div className='px-2 pt-0 pb-4'>

View File

@@ -432,13 +432,22 @@ export function Sidebar({
</div>
)}
{/* Level */}
{/* Status */}
<div>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Level</h3>
<div className='group relative text-sm capitalize'>
<CopyButton text={log.level} />
{log.level}
</div>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Status</h3>
{(() => {
const baseLevel = (log.level || 'info').toLowerCase()
const isPending = log.duration == null
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return (
<div className='group relative text-sm capitalize'>
<CopyButton text={statusLabel} />
{statusLabel}
</div>
)
})()}
</div>
{/* Trigger */}

View File

@@ -1,7 +1,8 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { AlertCircle, Info, Loader2 } from 'lucide-react'
import { AlertCircle, ArrowUpRight, Info, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
@@ -775,6 +776,13 @@ export default function Logs() {
{logs.map((log) => {
const formattedDate = formatDate(log.createdAt)
const isSelected = selectedLog?.id === log.id
const baseLevel = (log.level || 'info').toLowerCase()
const isError = baseLevel === 'error'
// If it's an error, don't treat it as pending even if hasPendingPause is true
const isPending = !isError && log.hasPendingPause === true
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return (
<div
@@ -785,7 +793,7 @@ export default function Logs() {
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px_40px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px_40px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px_40px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px_40px]'>
{/* Time */}
<div>
<div className='text-[13px]'>
@@ -806,12 +814,14 @@ export default function Logs() {
<div
className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-medium text-xs transition-all duration-200 lg:px-[8px]',
log.level === 'error'
isError
? 'bg-red-500 text-white'
: 'bg-secondary text-card-foreground'
: isPending
? 'bg-amber-300 text-amber-900 dark:bg-amber-500/90 dark:text-black'
: 'bg-secondary text-card-foreground'
)}
>
{log.level}
{statusLabel}
</div>
</div>
@@ -860,6 +870,23 @@ export default function Logs() {
{log.duration || '—'}
</div>
</div>
{/* Resume Link */}
<div className='flex justify-end'>
{isPending &&
log.executionId &&
(log.workflow?.id || log.workflowId) ? (
<Link
href={`/resume/${log.workflow?.id || log.workflowId}/${log.executionId}`}
className='inline-flex h-7 w-7 items-center justify-center rounded-md border border-primary/60 border-dashed text-primary hover:bg-primary/10'
aria-label='Open resume console'
>
<ArrowUpRight className='h-4 w-4' />
</Link>
) : (
<span className='h-7 w-7' />
)}
</div>
</div>
</div>
)

View File

@@ -95,6 +95,7 @@ export interface ExecutionLog {
} | null
workflowName?: string
workflowColor?: string
hasPendingPause?: boolean
}
/**
@@ -133,6 +134,7 @@ export function mapToExecutionLog(log: any): ExecutionLog {
: null,
workflowName: log.workflowName || log.workflow?.name,
workflowColor: log.workflowColor || log.workflow?.color,
hasPendingPause: log.hasPendingPause === true,
}
}
@@ -164,6 +166,7 @@ export function mapToExecutionLogAlt(log: any): ExecutionLog {
: null,
workflowName: log.workflow?.name,
workflowColor: log.workflow?.color,
hasPendingPause: log.hasPendingPause === true,
}
}

View File

@@ -17,6 +17,7 @@ import {
import { decryptSecret } from '@/lib/utils'
import { blockExistsInDeployment, loadDeployedWorkflowState } from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import { Serializer } from '@/serializer'
@@ -452,6 +453,24 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
loggingSession,
})
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Workflow execution completed: ${payload.workflowId}`, {
success: executionResult.success,
executionTime: executionResult.metadata?.duration,

View File

@@ -17,6 +17,7 @@ import {
loadWorkflowFromNormalizedTables,
} from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult } from '@/executor/types'
@@ -250,6 +251,24 @@ async function executeWebhookJobInternal(
loggingSession,
})
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Airtable webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
@@ -445,6 +464,24 @@ async function executeWebhookJobInternal(
loggingSession,
})
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,

View File

@@ -7,6 +7,7 @@ import { checkServerSideUsageLimits } from '@/lib/billing'
import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
@@ -119,6 +120,24 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
loggingSession,
})
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: result.success,
executionTime: result.metadata?.duration,

View File

@@ -0,0 +1,169 @@
import type { SVGProps } from 'react'
import { createElement } from 'react'
import { UserCheck } from 'lucide-react'
import type { BlockConfig } from '@/blocks/types'
import type { ResponseBlockOutput } from '@/tools/response/types'
/** Icon wrapper so the lucide component matches BlockConfig's SVG icon signature. */
const ApprovalIcon = (props: SVGProps<SVGSVGElement>) => createElement(UserCheck, props)

/**
 * Human-in-the-loop approval block.
 *
 * Pauses workflow execution at this block until an approver resumes it via
 * the resume UI. Only the "human" approval mode is currently supported; the
 * former API-response mode (operation/dataMode/data/status/headers
 * sub-blocks) was removed from the UI, but its inputs remain declared below
 * for backward compatibility with previously saved workflows.
 */
export const PauseResumeBlock: BlockConfig<ResponseBlockOutput> = {
  type: 'approval',
  name: 'Approval',
  description: 'Pause workflow execution and send structured API response',
  longDescription:
    'Combines response and start functionality. Sends structured responses and allows workflow to resume from this point.',
  category: 'blocks',
  bgColor: '#10B981',
  icon: ApprovalIcon,
  subBlocks: [
    {
      id: 'builderData',
      title: 'Paused Output',
      type: 'response-format',
      layout: 'full',
      description:
        'Define the structure of your response data. Use <variable.name> in field names to reference workflow variables.',
    },
    {
      id: 'notification',
      title: 'Notification',
      type: 'tool-input',
      layout: 'full',
      description: 'Configure notification tools to alert approvers (e.g., Slack, Email)',
      defaultValue: [],
    },
    {
      id: 'inputFormat',
      title: 'Resume Input',
      type: 'input-format',
      layout: 'full',
      description: 'Define the fields the approver can fill in when resuming',
    },
  ],
  tools: { access: [] },
  inputs: {
    // 'operation', 'dataMode', 'data', 'status', and 'headers' belong to the
    // retired API-response mode; kept so existing workflows keep validating.
    operation: {
      type: 'string',
      description: 'Operation mode: human or api',
    },
    inputFormat: {
      type: 'json',
      description: 'Input fields for resume',
    },
    notification: {
      type: 'json',
      description: 'Notification tools configuration',
    },
    dataMode: {
      type: 'string',
      description: 'Response data definition mode',
    },
    builderData: {
      type: 'json',
      description: 'Structured response data',
    },
    data: {
      type: 'json',
      description: 'JSON response body',
    },
    status: {
      type: 'number',
      description: 'HTTP status code',
    },
    headers: {
      type: 'json',
      description: 'Response headers',
    },
  },
  outputs: {
    // uiUrl is available as soon as the block pauses; the resume API URL is
    // intentionally not exposed as a referencable output.
    uiUrl: { type: 'string', description: 'Resume UI URL' },
  },
}

View File

@@ -50,6 +50,7 @@ import { OneDriveBlock } from '@/blocks/blocks/onedrive'
import { OpenAIBlock } from '@/blocks/blocks/openai'
import { OutlookBlock } from '@/blocks/blocks/outlook'
import { ParallelBlock } from '@/blocks/blocks/parallel'
import { PauseResumeBlock } from '@/blocks/blocks/pause_resume'
import { PerplexityBlock } from '@/blocks/blocks/perplexity'
import { PineconeBlock } from '@/blocks/blocks/pinecone'
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
@@ -95,6 +96,7 @@ export const registry: Record<string, BlockConfig> = {
agent: AgentBlock,
airtable: AirtableBlock,
api: ApiBlock,
approval: PauseResumeBlock,
arxiv: ArxivBlock,
browser_use: BrowserUseBlock,
clay: ClayBlock,

View File

@@ -505,6 +505,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (sourceBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
// If it's a self-reference, only show uiUrl (available immediately)
const isSelfReference = activeSourceBlockId === blockId
if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
}
} else {
// Check for tool-specific outputs first
const operationValue =
@@ -698,7 +713,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (!accessibleBlock) continue
// Skip the current block - blocks cannot reference their own outputs
if (accessibleBlockId === blockId) continue
// Exception: approval blocks can reference their own outputs
if (accessibleBlockId === blockId && accessibleBlock.type !== 'approval') continue
const blockConfig = getBlock(accessibleBlock.type)
@@ -817,6 +833,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
}
} else if (accessibleBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
// If it's a self-reference, only show uiUrl (available immediately)
const isSelfReference = accessibleBlockId === blockId
if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
}
} else {
// Check for tool-specific outputs first
const operationValue =

View File

@@ -130,7 +130,7 @@ export const setupExecutorCoreMocks = () => {
LoopManager: vi.fn().mockImplementation(() => ({
processLoopIterations: vi.fn().mockResolvedValue(false),
getLoopIndex: vi.fn().mockImplementation((loopId, blockId, context) => {
return context.loopIterations?.get(loopId) || 0
return context.loopExecutions?.get(loopId)?.iteration || 0
}),
})),
}))
@@ -463,8 +463,7 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
*/
export interface MockContextOptions {
workflowId?: string
loopIterations?: Map<string, number>
loopItems?: Map<string, any>
loopExecutions?: Map<string, any>
executedBlocks?: Set<string>
activeExecutionPath?: Set<string>
completedLoops?: Set<string>
@@ -485,13 +484,12 @@ export const createMockContext = (options: MockContextOptions = {}) => {
metadata: { startTime: new Date().toISOString(), duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: options.loopIterations || new Map(),
loopItems: options.loopItems || new Map(),
loopExecutions: options.loopExecutions || new Map(),
executedBlocks: options.executedBlocks || new Set<string>(),
activeExecutionPath: options.activeExecutionPath || new Set<string>(),
workflow,
completedLoops: options.completedLoops || new Set<string>(),
parallelExecutions: options.parallelExecutions,
parallelExecutions: options.parallelExecutions || new Map(),
parallelBlockMapping: options.parallelBlockMapping,
currentVirtualBlockId: options.currentVirtualBlockId,
}
@@ -509,7 +507,7 @@ export const createLoopManagerMock = (options?: {
getLoopIndex:
options?.getLoopIndexImpl ||
vi.fn().mockImplementation((loopId, blockId, context) => {
return context.loopIterations.get(loopId) || 0
return context.loopExecutions?.get(loopId)?.iteration || 0
}),
})),
})

View File

@@ -1,72 +1,42 @@
/**
* Central constants and types for the executor
*
* Consolidates all magic strings, block types, edge handles, and type definitions
* used throughout the executor to eliminate duplication and improve type safety.
*/
/**
* Block types
*/
export enum BlockType {
// Control flow
PARALLEL = 'parallel',
LOOP = 'loop',
ROUTER = 'router',
CONDITION = 'condition',
// Triggers
START_TRIGGER = 'start_trigger',
STARTER = 'starter',
TRIGGER = 'trigger',
// Data processing
FUNCTION = 'function',
AGENT = 'agent',
API = 'api',
EVALUATOR = 'evaluator',
VARIABLES = 'variables',
// I/O
RESPONSE = 'response',
APPROVAL = 'approval',
WORKFLOW = 'workflow',
WORKFLOW_INPUT = 'workflow_input',
// Utilities
WAIT = 'wait',
// Infrastructure (virtual blocks)
SENTINEL_START = 'sentinel_start',
SENTINEL_END = 'sentinel_end',
}
/**
* Trigger block types (blocks that can start a workflow)
*/
export const TRIGGER_BLOCK_TYPES = [
BlockType.START_TRIGGER,
BlockType.STARTER,
BlockType.TRIGGER,
] as const
/**
* Metadata-only block types (not executable, just configuration)
*/
export const METADATA_ONLY_BLOCK_TYPES = [BlockType.LOOP, BlockType.PARALLEL] as const
/**
* Loop types
*/
export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile'
/**
* Sentinel types
*/
export type SentinelType = 'start' | 'end'
/**
* Parallel types
*/
export type ParallelType = 'collection' | 'count'
export const EDGE = {
@@ -82,11 +52,7 @@ export const EDGE = {
DEFAULT: 'default',
} as const
/**
* Loop configuration
*/
export const LOOP = {
// Loop types
TYPE: {
FOR: 'for' as LoopType,
FOR_EACH: 'forEach' as LoopType,
@@ -94,39 +60,31 @@ export const LOOP = {
DO_WHILE: 'doWhile',
},
// Sentinel node naming
SENTINEL: {
PREFIX: 'loop-',
START_SUFFIX: '-sentinel-start',
END_SUFFIX: '-sentinel-end',
START_TYPE: 'start' as SentinelType,
END_TYPE: 'end' as SentinelType,
START_NAME_PREFIX: 'Loop Start',
END_NAME_PREFIX: 'Loop End',
},
} as const
/**
* Parallel configuration
*/
export const PARALLEL = {
// Parallel types
TYPE: {
COLLECTION: 'collection' as ParallelType,
COUNT: 'count' as ParallelType,
},
// Branch notation
BRANCH: {
PREFIX: '₍',
SUFFIX: '₎',
},
// Default values
DEFAULT_COUNT: 1,
} as const
/**
* Reference syntax for variable resolution
*/
export const REFERENCE = {
START: '<',
END: '>',
@@ -146,9 +104,6 @@ export const SPECIAL_REFERENCE_PREFIXES = [
REFERENCE.PREFIX.VARIABLE,
] as const
/**
* Loop reference fields
*/
export const LOOP_REFERENCE = {
ITERATION: 'iteration',
INDEX: 'index',
@@ -156,9 +111,6 @@ export const LOOP_REFERENCE = {
INDEX_PATH: 'loop.index',
} as const
/**
* Parallel reference fields
*/
export const PARALLEL_REFERENCE = {
INDEX: 'index',
CURRENT_ITEM: 'currentItem',
@@ -223,15 +175,44 @@ export const CONDITION = {
ELSE_TITLE: 'else',
} as const
/**
 * Pause/resume (human-in-the-loop) configuration: the supported resume
 * operation modes and the URL path segments used to resume a paused execution.
 */
export const PAUSE_RESUME = {
  OPERATION: {
    HUMAN: 'human',
    API: 'api',
  },
  PATH: {
    API_RESUME: '/api/resume',
    UI_RESUME: '/resume',
  },
} as const

/**
 * Builds the REST endpoint URL used to resume a paused execution.
 *
 * @param baseUrl - Origin to prefix; when undefined a relative URL is produced
 * @param workflowId - Workflow that contains the paused block
 * @param executionId - Identifier of the paused execution
 * @param contextId - Pause-context identifier within the execution
 * @returns `{baseUrl}/api/resume/{workflowId}/{executionId}/{contextId}`
 */
export function buildResumeApiUrl(
  baseUrl: string | undefined,
  workflowId: string,
  executionId: string,
  contextId: string
): string {
  const origin = baseUrl ?? ''
  const tail = [workflowId, executionId, contextId].join('/')
  return `${origin}${PAUSE_RESUME.PATH.API_RESUME}/${tail}`
}

/**
 * Builds the UI page URL where a human can review and resume a paused
 * execution.
 *
 * @param baseUrl - Origin to prefix; when undefined a relative URL is produced
 * @param workflowId - Workflow that contains the paused block
 * @param executionId - Identifier of the paused execution
 * @returns `{baseUrl}/resume/{workflowId}/{executionId}`
 */
export function buildResumeUiUrl(
  baseUrl: string | undefined,
  workflowId: string,
  executionId: string
): string {
  const origin = baseUrl ?? ''
  return `${origin}${PAUSE_RESUME.PATH.UI_RESUME}/${workflowId}/${executionId}`
}

/** Shared parsing constants: numeric radix and log-preview truncation. */
export const PARSING = {
  JSON_RADIX: 10,
  PREVIEW_LENGTH: 200,
  PREVIEW_SUFFIX: '...',
} as const
/**
* Condition configuration
*/
export type FieldType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files' | 'plain'
export interface ConditionConfig {
id: string
label?: string

View File

@@ -0,0 +1,68 @@
import { describe, expect, it, vi } from 'vitest'
import { BlockType } from '@/executor/consts'
import { DAGBuilder } from '@/executor/dag/builder'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn().mockReturnValue({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
}),
}))
/**
 * Builds a minimal SerializedBlock test fixture: enabled, positioned at the
 * origin, with a no-op tool, whose display name equals its id and whose
 * metadata id is the given block type.
 */
function createBlock(id: string, metadataId: string): SerializedBlock {
  const metadata = { id: metadataId, name: id }
  const config = { tool: 'noop', params: {} }
  return {
    id,
    position: { x: 0, y: 0 },
    config,
    inputs: {},
    outputs: {},
    metadata,
    enabled: true,
  }
}
// Verifies the DAG transformation applied to pause (approval/HITL) blocks:
// the pause node is flagged as a pause response, its original incoming and
// outgoing edges survive the rewiring, and no separate trigger node is left
// in the DAG under the `__trigger` suffix.
describe('DAGBuilder pause-resume transformation', () => {
it('creates trigger nodes and rewires edges for pause blocks', () => {
// Linear three-block workflow: start -> pause (approval) -> finish.
const workflow: SerializedWorkflow = {
version: '1',
blocks: [
createBlock('start', BlockType.STARTER),
createBlock('pause', BlockType.APPROVAL),
createBlock('finish', BlockType.FUNCTION),
],
connections: [
{ source: 'start', target: 'pause' },
{ source: 'pause', target: 'finish' },
],
loops: {},
}
const builder = new DAGBuilder()
const dag = builder.build(workflow)
// The approval block must be present and marked as a pause response node.
const pauseNode = dag.nodes.get('pause')
expect(pauseNode).toBeDefined()
expect(pauseNode?.metadata.isPauseResponse).toBe(true)
// start keeps exactly one outgoing edge, still pointing at the pause node.
const startNode = dag.nodes.get('start')!
const startOutgoing = Array.from(startNode.outgoingEdges.values())
expect(startOutgoing).toHaveLength(1)
expect(startOutgoing[0].target).toBe('pause')
// pause keeps exactly one outgoing edge, still pointing at finish.
const pauseOutgoing = Array.from(pauseNode!.outgoingEdges.values())
expect(pauseOutgoing).toHaveLength(1)
expect(pauseOutgoing[0].target).toBe('finish')
// No standalone trigger node is materialized for the pause block.
const triggerNode = dag.nodes.get('pause__trigger')
expect(triggerNode).toBeUndefined()
})
})

View File

@@ -1,15 +1,15 @@
import { createLogger } from '@/lib/logs/console/logger'
import { EdgeConstructor } from '@/executor/dag/construction/edges'
import { LoopConstructor } from '@/executor/dag/construction/loops'
import { NodeConstructor } from '@/executor/dag/construction/nodes'
import { PathConstructor } from '@/executor/dag/construction/paths'
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
import type {
SerializedBlock,
SerializedLoop,
SerializedParallel,
SerializedWorkflow,
} from '@/serializer/types'
import { EdgeConstructor } from './construction/edges'
import { LoopConstructor } from './construction/loops'
import { NodeConstructor } from './construction/nodes'
import { PathConstructor } from './construction/paths'
import type { DAGEdge, NodeMetadata } from './types'
const logger = createLogger('DAGBuilder')
@@ -33,7 +33,11 @@ export class DAGBuilder {
private nodeConstructor = new NodeConstructor()
private edgeConstructor = new EdgeConstructor()
build(workflow: SerializedWorkflow, triggerBlockId?: string): DAG {
build(
workflow: SerializedWorkflow,
triggerBlockId?: string,
savedIncomingEdges?: Record<string, string[]>
): DAG {
const dag: DAG = {
nodes: new Map(),
loopConfigs: new Map(),
@@ -43,26 +47,46 @@ export class DAGBuilder {
this.initializeConfigs(workflow, dag)
const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId)
logger.debug('Reachable blocks from trigger:', {
triggerBlockId,
reachableCount: reachableBlocks.size,
totalBlocks: workflow.blocks.length,
})
this.loopConstructor.execute(dag, reachableBlocks)
const { blocksInLoops, blocksInParallels } = this.nodeConstructor.execute(
const { blocksInLoops, blocksInParallels, pauseTriggerMapping } = this.nodeConstructor.execute(
workflow,
dag,
reachableBlocks
)
this.edgeConstructor.execute(workflow, dag, blocksInParallels, blocksInLoops, reachableBlocks)
this.edgeConstructor.execute(
workflow,
dag,
blocksInParallels,
blocksInLoops,
reachableBlocks,
pauseTriggerMapping
)
if (savedIncomingEdges) {
logger.info('Restoring DAG incoming edges from snapshot', {
nodeCount: Object.keys(savedIncomingEdges).length,
})
for (const [nodeId, incomingEdgeArray] of Object.entries(savedIncomingEdges)) {
const node = dag.nodes.get(nodeId)
if (node) {
node.incomingEdges = new Set(incomingEdgeArray)
}
}
}
logger.info('DAG built', {
totalNodes: dag.nodes.size,
loopCount: dag.loopConfigs.size,
parallelCount: dag.parallelConfigs.size,
allNodeIds: Array.from(dag.nodes.keys()),
triggerNodes: Array.from(dag.nodes.values())
.filter((n) => n.metadata?.isResumeTrigger)
.map((n) => ({ id: n.id, originalBlockId: n.metadata?.originalBlockId })),
})
return dag

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import { EDGE, isConditionBlockType, isRouterBlockType } from '@/executor/consts'
import type { DAG } from '@/executor/dag/builder'
import {
buildBranchNodeId,
buildSentinelEndId,
@@ -9,7 +10,6 @@ import {
parseDistributionItems,
} from '@/executor/utils/subflow-utils'
import type { SerializedWorkflow } from '@/serializer/types'
import type { DAG } from '../builder'
const logger = createLogger('EdgeConstructor')
@@ -31,11 +31,13 @@ export class EdgeConstructor {
dag: DAG,
blocksInParallels: Set<string>,
blocksInLoops: Set<string>,
reachableBlocks: Set<string>
reachableBlocks: Set<string>,
pauseTriggerMapping: Map<string, string>
): void {
const loopBlockIds = new Set(dag.loopConfigs.keys())
const parallelBlockIds = new Set(dag.parallelConfigs.keys())
const metadata = this.buildMetadataMaps(workflow)
this.wireRegularEdges(
workflow,
dag,
@@ -44,21 +46,26 @@ export class EdgeConstructor {
reachableBlocks,
loopBlockIds,
parallelBlockIds,
metadata
metadata,
pauseTriggerMapping
)
this.wireLoopSentinels(dag, reachableBlocks)
this.wireParallelBlocks(workflow, dag, loopBlockIds, parallelBlockIds)
this.wireParallelBlocks(workflow, dag, loopBlockIds, parallelBlockIds, pauseTriggerMapping)
}
private buildMetadataMaps(workflow: SerializedWorkflow): EdgeMetadata {
const blockTypeMap = new Map<string, string>()
const conditionConfigMap = new Map<string, ConditionConfig[]>()
const routerBlockIds = new Set<string>()
for (const block of workflow.blocks) {
const blockType = block.metadata?.id ?? ''
blockTypeMap.set(block.id, blockType)
if (isConditionBlockType(blockType)) {
const conditions = this.parseConditionConfig(block)
if (conditions) {
conditionConfigMap.set(block.id, conditions)
}
@@ -66,24 +73,29 @@ export class EdgeConstructor {
routerBlockIds.add(block.id)
}
}
return { blockTypeMap, conditionConfigMap, routerBlockIds }
}
/**
 * Extracts the condition configuration from a block's params.
 *
 * Accepts either a JSON-encoded string (parsed here) or an already-parsed
 * array, mirroring the two shapes the serializer may produce.
 *
 * @param block - Serialized block whose `config.params.conditions` is read
 * @returns The condition list, or null when absent, malformed JSON, or an
 *          unexpected shape (parse failures are logged, never thrown)
 */
private parseConditionConfig(block: any): ConditionConfig[] | null {
try {
const conditionsJson = block.config.params?.conditions
if (typeof conditionsJson === 'string') {
// NOTE(review): the parsed value is assumed to be ConditionConfig[];
// not validated here — confirm upstream serialization guarantees it.
return JSON.parse(conditionsJson)
}
if (Array.isArray(conditionsJson)) {
return conditionsJson
}
return null
} catch (error) {
// Malformed JSON is tolerated: log and fall back to "no conditions".
logger.warn('Failed to parse condition config', {
blockId: block.id,
error: error instanceof Error ? error.message : String(error),
})
return null
}
}
@@ -96,21 +108,25 @@ export class EdgeConstructor {
workflow: SerializedWorkflow
): string | undefined {
let handle = sourceHandle
if (!handle && isConditionBlockType(metadata.blockTypeMap.get(source) ?? '')) {
const conditions = metadata.conditionConfigMap.get(source)
if (conditions && conditions.length > 0) {
const edgesFromCondition = workflow.connections.filter((c) => c.source === source)
const edgeIndex = edgesFromCondition.findIndex((e) => e.target === target)
if (edgeIndex >= 0 && edgeIndex < conditions.length) {
const correspondingCondition = conditions[edgeIndex]
handle = `${EDGE.CONDITION_PREFIX}${correspondingCondition.id}`
}
}
}
if (metadata.routerBlockIds.has(source)) {
handle = `${EDGE.ROUTER_PREFIX}${target}`
logger.debug('Set router sourceHandle', { source, target, sourceHandle: handle })
}
return handle
}
@@ -122,10 +138,12 @@ export class EdgeConstructor {
reachableBlocks: Set<string>,
loopBlockIds: Set<string>,
parallelBlockIds: Set<string>,
metadata: EdgeMetadata
metadata: EdgeMetadata,
pauseTriggerMapping: Map<string, string>
): void {
for (const connection of workflow.connections) {
let { source, target } = connection
const originalSource = source
let sourceHandle = this.generateSourceHandle(
source,
target,
@@ -138,6 +156,7 @@ export class EdgeConstructor {
const targetIsLoopBlock = loopBlockIds.has(target)
const sourceIsParallelBlock = parallelBlockIds.has(source)
const targetIsParallelBlock = parallelBlockIds.has(target)
if (
sourceIsLoopBlock ||
targetIsLoopBlock ||
@@ -146,38 +165,42 @@ export class EdgeConstructor {
) {
if (sourceIsLoopBlock) {
const sentinelEndId = buildSentinelEndId(source)
if (!dag.nodes.has(sentinelEndId)) {
logger.debug('Skipping loop exit edge - sentinel not found', { source, target })
continue
}
source = sentinelEndId
sourceHandle = EDGE.LOOP_EXIT
logger.debug('Redirected loop exit edge', { from: sentinelEndId, to: target })
}
if (targetIsLoopBlock) {
const sentinelStartId = buildSentinelStartId(target)
if (!dag.nodes.has(sentinelStartId)) {
logger.debug('Skipping loop entry edge - sentinel not found', { source, target })
continue
}
target = sentinelStartId
logger.debug('Redirected loop entry edge', { from: source, to: sentinelStartId })
}
if (sourceIsParallelBlock || targetIsParallelBlock) {
continue
}
}
if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) {
logger.debug('Skipping edge that crosses loop boundary', { source, target })
continue
}
if (!this.isEdgeReachable(source, target, reachableBlocks, dag)) {
logger.debug('Skipping edge - not reachable', { source, target })
continue
}
if (blocksInParallels.has(source) && blocksInParallels.has(target)) {
const sourceParallelId = this.getParallelId(source, dag)
const targetParallelId = this.getParallelId(target, dag)
if (sourceParallelId === targetParallelId) {
this.wireParallelInternalEdge(
source,
@@ -185,18 +208,16 @@ export class EdgeConstructor {
sourceParallelId!,
dag,
sourceHandle,
targetHandle
targetHandle,
pauseTriggerMapping
)
} else {
logger.warn('Edge between different parallels - invalid workflow', { source, target })
}
} else if (blocksInParallels.has(source) || blocksInParallels.has(target)) {
logger.debug('Skipping internal-to-external edge (handled by parallel wiring)', {
source,
target,
})
} else {
this.addEdge(dag, source, target, sourceHandle, targetHandle)
const resolvedSource = pauseTriggerMapping.get(originalSource) ?? source
this.addEdge(dag, resolvedSource, target, sourceHandle, targetHandle)
}
}
}
@@ -204,27 +225,27 @@ export class EdgeConstructor {
private wireLoopSentinels(dag: DAG, reachableBlocks: Set<string>): void {
for (const [loopId, loopConfig] of dag.loopConfigs) {
const nodes = loopConfig.nodes
if (nodes.length === 0) continue
const sentinelStartId = buildSentinelStartId(loopId)
const sentinelEndId = buildSentinelEndId(loopId)
if (!dag.nodes.has(sentinelStartId) || !dag.nodes.has(sentinelEndId)) {
logger.debug('Skipping sentinel wiring for unreachable loop', { loopId })
continue
}
const { startNodes, terminalNodes } = this.findLoopBoundaryNodes(nodes, dag, reachableBlocks)
logger.debug('Wiring sentinel nodes for loop', {
loopId,
startNodes,
terminalNodes,
})
for (const startNodeId of startNodes) {
this.addEdge(dag, sentinelStartId, startNodeId)
}
for (const terminalNodeId of terminalNodes) {
this.addEdge(dag, terminalNodeId, sentinelEndId)
}
this.addEdge(dag, sentinelEndId, sentinelStartId, EDGE.LOOP_CONTINUE, undefined, true)
logger.debug('Added backward edge for loop', { loopId })
}
}
@@ -232,26 +253,33 @@ export class EdgeConstructor {
workflow: SerializedWorkflow,
dag: DAG,
loopBlockIds: Set<string>,
parallelBlockIds: Set<string>
parallelBlockIds: Set<string>,
pauseTriggerMapping: Map<string, string>
): void {
for (const [parallelId, parallelConfig] of dag.parallelConfigs) {
const nodes = parallelConfig.nodes
if (nodes.length === 0) continue
const { entryNodes, terminalNodes, branchCount } = this.findParallelBoundaryNodes(
nodes,
parallelId,
dag
)
logger.info('Wiring parallel block edges', {
parallelId,
entryNodes,
terminalNodes,
branchCount,
})
for (const connection of workflow.connections) {
const { source, target, sourceHandle, targetHandle } = connection
if (target === parallelId) {
if (loopBlockIds.has(source) || parallelBlockIds.has(source)) continue
if (nodes.includes(source)) {
logger.warn('Invalid: parallel block connected from its own internal node', {
parallelId,
@@ -259,18 +287,23 @@ export class EdgeConstructor {
})
continue
}
logger.info('Wiring edge to parallel block', { source, parallelId, entryNodes })
for (const entryNodeId of entryNodes) {
for (let i = 0; i < branchCount; i++) {
const branchNodeId = buildBranchNodeId(entryNodeId, i)
if (dag.nodes.has(branchNodeId)) {
this.addEdge(dag, source, branchNodeId, sourceHandle, targetHandle)
}
}
}
}
if (source === parallelId) {
if (loopBlockIds.has(target) || parallelBlockIds.has(target)) continue
if (nodes.includes(target)) {
logger.warn('Invalid: parallel block connected to its own internal node', {
parallelId,
@@ -278,12 +311,16 @@ export class EdgeConstructor {
})
continue
}
logger.info('Wiring edge from parallel block', { parallelId, target, terminalNodes })
for (const terminalNodeId of terminalNodes) {
for (let i = 0; i < branchCount; i++) {
const branchNodeId = buildBranchNodeId(terminalNodeId, i)
if (dag.nodes.has(branchNodeId)) {
this.addEdge(dag, branchNodeId, target, sourceHandle, targetHandle)
const resolvedSourceId = pauseTriggerMapping.get(branchNodeId) ?? branchNodeId
this.addEdge(dag, resolvedSourceId, target, sourceHandle, targetHandle)
}
}
}
@@ -300,22 +337,28 @@ export class EdgeConstructor {
): boolean {
const sourceInLoop = blocksInLoops.has(source)
const targetInLoop = blocksInLoops.has(target)
if (sourceInLoop !== targetInLoop) {
return true
}
if (!sourceInLoop && !targetInLoop) {
return false
}
let sourceLoopId: string | undefined
let targetLoopId: string | undefined
for (const [loopId, loopConfig] of dag.loopConfigs) {
if (loopConfig.nodes.includes(source)) {
sourceLoopId = loopId
}
if (loopConfig.nodes.includes(target)) {
targetLoopId = loopId
}
}
return sourceLoopId !== targetLoopId
}
@@ -340,18 +383,23 @@ export class EdgeConstructor {
parallelId: string,
dag: DAG,
sourceHandle?: string,
targetHandle?: string
targetHandle?: string,
pauseTriggerMapping?: Map<string, string>
): void {
const parallelConfig = dag.parallelConfigs.get(parallelId)
if (!parallelConfig) {
throw new Error(`Parallel config not found: ${parallelId}`)
}
const distributionItems = parseDistributionItems(parallelConfig)
const count = calculateBranchCount(parallelConfig, distributionItems)
for (let i = 0; i < count; i++) {
const sourceNodeId = buildBranchNodeId(source, i)
const targetNodeId = buildBranchNodeId(target, i)
this.addEdge(dag, sourceNodeId, targetNodeId, sourceHandle, targetHandle)
const resolvedSourceId = pauseTriggerMapping?.get(sourceNodeId) ?? sourceNodeId
this.addEdge(dag, resolvedSourceId, targetNodeId, sourceHandle, targetHandle)
}
}
@@ -363,34 +411,45 @@ export class EdgeConstructor {
const nodesSet = new Set(nodes)
const startNodesSet = new Set<string>()
const terminalNodesSet = new Set<string>()
for (const nodeId of nodes) {
const node = dag.nodes.get(nodeId)
if (!node) continue
let hasIncomingFromLoop = false
for (const incomingNodeId of node.incomingEdges) {
if (nodesSet.has(incomingNodeId)) {
hasIncomingFromLoop = true
break
}
}
if (!hasIncomingFromLoop) {
startNodesSet.add(nodeId)
}
}
for (const nodeId of nodes) {
const node = dag.nodes.get(nodeId)
if (!node) continue
let hasOutgoingToLoop = false
for (const [_, edge] of node.outgoingEdges) {
if (nodesSet.has(edge.target)) {
hasOutgoingToLoop = true
break
}
}
if (!hasOutgoingToLoop) {
terminalNodesSet.add(nodeId)
}
}
return {
startNodes: Array.from(startNodesSet),
terminalNodes: Array.from(terminalNodesSet),
@@ -406,59 +465,80 @@ export class EdgeConstructor {
const entryNodesSet = new Set<string>()
const terminalNodesSet = new Set<string>()
const parallelConfig = dag.parallelConfigs.get(parallelId)
if (!parallelConfig) {
throw new Error(`Parallel config not found: ${parallelId}`)
}
const distributionItems = parseDistributionItems(parallelConfig)
const branchCount = calculateBranchCount(parallelConfig, distributionItems)
for (const nodeId of nodes) {
let hasAnyBranch = false
for (let i = 0; i < branchCount; i++) {
if (dag.nodes.has(buildBranchNodeId(nodeId, i))) {
hasAnyBranch = true
break
}
}
if (!hasAnyBranch) continue
const firstBranchId = buildBranchNodeId(nodeId, 0)
const firstBranchNode = dag.nodes.get(firstBranchId)
if (!firstBranchNode) continue
let hasIncomingFromParallel = false
for (const incomingNodeId of firstBranchNode.incomingEdges) {
const originalNodeId = extractBaseBlockId(incomingNodeId)
if (nodesSet.has(originalNodeId)) {
hasIncomingFromParallel = true
break
}
}
if (!hasIncomingFromParallel) {
entryNodesSet.add(nodeId)
}
}
for (const nodeId of nodes) {
let hasAnyBranch = false
for (let i = 0; i < branchCount; i++) {
if (dag.nodes.has(buildBranchNodeId(nodeId, i))) {
hasAnyBranch = true
break
}
}
if (!hasAnyBranch) continue
const firstBranchId = buildBranchNodeId(nodeId, 0)
const firstBranchNode = dag.nodes.get(firstBranchId)
if (!firstBranchNode) continue
let hasOutgoingToParallel = false
for (const [_, edge] of firstBranchNode.outgoingEdges) {
const originalTargetId = extractBaseBlockId(edge.target)
if (nodesSet.has(originalTargetId)) {
hasOutgoingToParallel = true
break
}
}
if (!hasOutgoingToParallel) {
terminalNodesSet.add(nodeId)
}
}
return {
entryNodes: Array.from(entryNodesSet),
terminalNodes: Array.from(terminalNodesSet),
@@ -485,25 +565,23 @@ export class EdgeConstructor {
): void {
const sourceNode = dag.nodes.get(sourceId)
const targetNode = dag.nodes.get(targetId)
if (!sourceNode || !targetNode) {
logger.warn('Edge references non-existent node', { sourceId, targetId })
return
}
const edgeId = `${sourceId}${targetId}`
sourceNode.outgoingEdges.set(edgeId, {
target: targetId,
sourceHandle,
targetHandle,
isActive: isLoopBackEdge ? false : undefined,
})
if (!isLoopBackEdge) {
targetNode.incomingEdges.add(sourceId)
logger.debug('Added incoming edge', { from: sourceId, to: targetId })
} else {
logger.debug('Skipped adding backwards-edge to incomingEdges', {
from: sourceId,
to: targetId,
})
}
}
}

View File

@@ -1,7 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, LOOP, type SentinelType } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import { buildSentinelEndId, buildSentinelStartId } from '@/executor/utils/subflow-utils'
import type { DAG, DAGNode } from '../builder'
const logger = createLogger('LoopConstructor')
@@ -9,16 +9,19 @@ export class LoopConstructor {
execute(dag: DAG, reachableBlocks: Set<string>): void {
for (const [loopId, loopConfig] of dag.loopConfigs) {
const loopNodes = loopConfig.nodes
if (loopNodes.length === 0) {
continue
}
if (!this.hasReachableNodes(loopNodes, reachableBlocks)) {
logger.debug('Skipping sentinel creation for unreachable loop', { loopId })
continue
}
this.createSentinelPair(dag, loopId)
}
}
private hasReachableNodes(loopNodes: string[], reachableBlocks: Set<string>): boolean {
return loopNodes.some((nodeId) => reachableBlocks.has(nodeId))
}
@@ -26,6 +29,7 @@ export class LoopConstructor {
private createSentinelPair(dag: DAG, loopId: string): void {
const startId = buildSentinelStartId(loopId)
const endId = buildSentinelEndId(loopId)
dag.nodes.set(
startId,
this.createSentinelNode({
@@ -33,9 +37,10 @@ export class LoopConstructor {
loopId,
sentinelType: LOOP.SENTINEL.START_TYPE,
blockType: BlockType.SENTINEL_START,
name: `Loop Start (${loopId})`,
name: `${LOOP.SENTINEL.START_NAME_PREFIX} (${loopId})`,
})
)
dag.nodes.set(
endId,
this.createSentinelNode({
@@ -43,15 +48,9 @@ export class LoopConstructor {
loopId,
sentinelType: LOOP.SENTINEL.END_TYPE,
blockType: BlockType.SENTINEL_END,
name: `Loop End (${loopId})`,
name: `${LOOP.SENTINEL.END_NAME_PREFIX} (${loopId})`,
})
)
logger.debug('Created sentinel pair for loop', {
loopId,
startId,
endId,
})
}
private createSentinelNode(config: {

View File

@@ -1,14 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isMetadataOnlyBlockType } from '@/executor/consts'
import { BlockType, isMetadataOnlyBlockType } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import {
buildBranchNodeId,
calculateBranchCount,
parseDistributionItems,
} from '@/executor/utils/subflow-utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import type { DAG, DAGNode } from '../builder'
const logger = createLogger('NodeConstructor')
interface ParallelExpansion {
parallelId: string
branchCount: number
@@ -20,39 +18,47 @@ export class NodeConstructor {
workflow: SerializedWorkflow,
dag: DAG,
reachableBlocks: Set<string>
): { blocksInLoops: Set<string>; blocksInParallels: Set<string> } {
): {
blocksInLoops: Set<string>
blocksInParallels: Set<string>
pauseTriggerMapping: Map<string, string>
} {
const blocksInLoops = new Set<string>()
const blocksInParallels = new Set<string>()
const pauseTriggerMapping = new Map<string, string>()
this.categorizeBlocks(dag, reachableBlocks, blocksInLoops, blocksInParallels)
for (const block of workflow.blocks) {
if (!this.shouldProcessBlock(block, reachableBlocks)) {
continue
}
const parallelId = this.findParallelForBlock(block.id, dag)
if (parallelId) {
this.createParallelBranchNodes(block, parallelId, dag)
} else {
this.createRegularOrLoopNode(block, blocksInLoops, dag)
}
}
return { blocksInLoops, blocksInParallels }
return { blocksInLoops, blocksInParallels, pauseTriggerMapping }
}
private shouldProcessBlock(block: SerializedBlock, reachableBlocks: Set<string>): boolean {
if (!block.enabled) {
return false
}
if (!reachableBlocks.has(block.id)) {
logger.debug('Skipping unreachable block', { blockId: block.id })
return false
}
if (isMetadataOnlyBlockType(block.metadata?.id)) {
logger.debug('Skipping metadata-only block', {
blockId: block.id,
blockType: block.metadata?.id,
})
return false
}
return true
}
@@ -96,11 +102,7 @@ export class NodeConstructor {
private createParallelBranchNodes(block: SerializedBlock, parallelId: string, dag: DAG): void {
const expansion = this.calculateParallelExpansion(parallelId, dag)
logger.debug('Creating parallel branches', {
blockId: block.id,
parallelId: expansion.parallelId,
branchCount: expansion.branchCount,
})
for (let branchIndex = 0; branchIndex < expansion.branchCount; branchIndex++) {
const branchNode = this.createParallelBranchNode(block, branchIndex, expansion)
dag.nodes.set(branchNode.id, branchNode)
@@ -109,11 +111,14 @@ export class NodeConstructor {
private calculateParallelExpansion(parallelId: string, dag: DAG): ParallelExpansion {
const config = dag.parallelConfigs.get(parallelId)
if (!config) {
throw new Error(`Parallel config not found: ${parallelId}`)
}
const distributionItems = parseDistributionItems(config)
const branchCount = calculateBranchCount(config, distributionItems)
return {
parallelId,
branchCount,
@@ -127,9 +132,13 @@ export class NodeConstructor {
expansion: ParallelExpansion
): DAGNode {
const branchNodeId = buildBranchNodeId(baseBlock.id, branchIndex)
const blockClone: SerializedBlock = {
...baseBlock,
id: branchNodeId,
}
return {
id: branchNodeId,
block: { ...baseBlock },
block: blockClone,
incomingEdges: new Set(),
outgoingEdges: new Map(),
metadata: {
@@ -138,6 +147,8 @@ export class NodeConstructor {
branchIndex,
branchTotal: expansion.branchCount,
distributionItem: expansion.distributionItems[branchIndex],
isPauseResponse: baseBlock.metadata?.id === BlockType.APPROVAL,
originalBlockId: baseBlock.id,
},
}
}
@@ -149,6 +160,8 @@ export class NodeConstructor {
): void {
const isLoopNode = blocksInLoops.has(block.id)
const loopId = isLoopNode ? this.findLoopIdForBlock(block.id, dag) : undefined
const isPauseBlock = block.metadata?.id === BlockType.APPROVAL
dag.nodes.set(block.id, {
id: block.id,
block,
@@ -157,10 +170,50 @@ export class NodeConstructor {
metadata: {
isLoopNode,
loopId,
isPauseResponse: isPauseBlock,
originalBlockId: block.id,
},
})
}
/**
 * Builds a synthetic resume-trigger DAG node derived from a pause block.
 *
 * The node clones the source block under a new id, forces it enabled, and
 * retypes it as a start trigger so the executor can resume from it. The node
 * metadata records the original block id plus any loop/parallel placement so
 * downstream wiring can map edges back to the real block.
 *
 * @param block - The pause block the trigger is derived from
 * @param triggerId - Id for the new trigger node (distinct from block.id)
 * @param options - Loop/parallel placement carried onto the node metadata
 * @returns A DAGNode with empty edge sets, flagged `isResumeTrigger`
 */
private createTriggerNode(
block: SerializedBlock,
triggerId: string,
options: {
loopId?: string
isParallelBranch?: boolean
parallelId?: string
branchIndex?: number
branchTotal?: number
}
): DAGNode {
// Shallow-clone the block, re-identify it, and retype it as a start trigger.
const triggerBlock: SerializedBlock = {
...block,
id: triggerId,
enabled: true,
metadata: {
...block.metadata,
id: BlockType.START_TRIGGER,
},
}
return {
id: triggerId,
block: triggerBlock,
// Edges are wired later by the edge constructor; start empty.
incomingEdges: new Set(),
outgoingEdges: new Map(),
metadata: {
isResumeTrigger: true,
// Preserve the pause block's id so edges can resolve back to it.
originalBlockId: block.id,
loopId: options.loopId,
isParallelBranch: options.isParallelBranch,
parallelId: options.parallelId,
branchIndex: options.branchIndex,
branchTotal: options.branchTotal,
},
}
}
private findLoopIdForBlock(blockId: string, dag: DAG): string | undefined {
for (const [loopId, loopConfig] of dag.loopConfigs) {
if (loopConfig.nodes.includes(blockId)) {

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isMetadataOnlyBlockType, isTriggerBlockType } from '@/executor/consts'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
const logger = createLogger('PathConstructor')
@@ -7,18 +8,15 @@ const logger = createLogger('PathConstructor')
export class PathConstructor {
execute(workflow: SerializedWorkflow, triggerBlockId?: string): Set<string> {
const resolvedTriggerId = this.findTriggerBlock(workflow, triggerBlockId)
if (!resolvedTriggerId) {
logger.warn('No trigger block found, including all enabled blocks as fallback')
return this.getAllEnabledBlocks(workflow)
}
logger.debug('Starting reachability traversal', { triggerBlockId: resolvedTriggerId })
const adjacency = this.buildAdjacencyMap(workflow)
const reachable = this.performBFS(resolvedTriggerId, adjacency)
logger.debug('Reachability analysis complete', {
triggerBlockId: resolvedTriggerId,
reachableCount: reachable.size,
totalBlocks: workflow.blocks.length,
})
return reachable
}
@@ -28,39 +26,43 @@ export class PathConstructor {
): string | undefined {
if (triggerBlockId) {
const block = workflow.blocks.find((b) => b.id === triggerBlockId)
if (!block) {
logger.error('Provided triggerBlockId not found in workflow', {
triggerBlockId,
availableBlocks: workflow.blocks.map((b) => ({ id: b.id, type: b.metadata?.id })),
})
throw new Error(`Trigger block not found: ${triggerBlockId}`)
if (block) {
return triggerBlockId
}
logger.debug('Using explicitly provided trigger block', {
const fallbackTriggerId = this.resolveResumeTriggerFallback(triggerBlockId, workflow)
if (fallbackTriggerId) {
return fallbackTriggerId
}
logger.error('Provided triggerBlockId not found in workflow', {
triggerBlockId,
blockType: block.metadata?.id,
availableBlocks: workflow.blocks.map((b) => ({ id: b.id, type: b.metadata?.id })),
})
return triggerBlockId
throw new Error(`Trigger block not found: ${triggerBlockId}`)
}
const explicitTrigger = this.findExplicitTrigger(workflow)
if (explicitTrigger) {
return explicitTrigger
}
const rootBlock = this.findRootBlock(workflow)
if (rootBlock) {
return rootBlock
}
return undefined
}
private findExplicitTrigger(workflow: SerializedWorkflow): string | undefined {
for (const block of workflow.blocks) {
if (block.enabled && this.isTriggerBlock(block)) {
logger.debug('Found explicit trigger block', {
blockId: block.id,
blockType: block.metadata?.id,
})
return block.id
}
}
@@ -69,40 +71,37 @@ export class PathConstructor {
private findRootBlock(workflow: SerializedWorkflow): string | undefined {
const hasIncoming = new Set(workflow.connections.map((c) => c.target))
for (const block of workflow.blocks) {
if (
!hasIncoming.has(block.id) &&
block.enabled &&
!isMetadataOnlyBlockType(block.metadata?.id)
) {
logger.debug('Found root block (no incoming connections)', {
blockId: block.id,
blockType: block.metadata?.id,
})
return block.id
}
}
return undefined
}
private isTriggerBlock(block: SerializedBlock): boolean {
return isTriggerBlockType(block.metadata?.id)
}
private getAllEnabledBlocks(workflow: SerializedWorkflow): Set<string> {
return new Set(workflow.blocks.filter((b) => b.enabled).map((b) => b.id))
}
private buildAdjacencyMap(workflow: SerializedWorkflow): Map<string, string[]> {
const adjacency = new Map<string, string[]>()
for (const connection of workflow.connections) {
const neighbors = adjacency.get(connection.source) ?? []
neighbors.push(connection.target)
adjacency.set(connection.source, neighbors)
}
logger.debug('Built adjacency map', {
nodeCount: adjacency.size,
connectionCount: workflow.connections.length,
})
return adjacency
}
@@ -110,43 +109,44 @@ export class PathConstructor {
const reachable = new Set<string>([triggerBlockId])
const queue = [triggerBlockId]
logger.debug('Starting BFS traversal', {
triggerBlockId,
adjacencyMapSize: adjacency.size,
adjacencyEntries: Array.from(adjacency.entries()).map(([source, targets]) => ({
source,
targets,
})),
})
while (queue.length > 0) {
const currentBlockId = queue.shift()
if (!currentBlockId) break
const neighbors = adjacency.get(currentBlockId) ?? []
logger.debug('BFS processing node', {
currentBlockId,
neighbors,
neighborCount: neighbors.length,
})
for (const neighborId of neighbors) {
if (!reachable.has(neighborId)) {
logger.debug('BFS found new reachable node', {
from: currentBlockId,
to: neighborId,
})
reachable.add(neighborId)
queue.push(neighborId)
}
}
}
logger.debug('BFS traversal complete', {
triggerBlockId,
reachableCount: reachable.size,
reachableBlocks: Array.from(reachable),
})
return reachable
}
/**
 * Maps a synthetic resume-trigger id (`<blockId>__trigger`) back to a real
 * block id present in the workflow, or undefined when no candidate matches.
 *
 * Tries the raw base id first, then the base id run through
 * `extractBaseBlockId` (when that produces something different).
 */
private resolveResumeTriggerFallback(
  triggerBlockId: string,
  workflow: SerializedWorkflow
): string | undefined {
  const suffix = '__trigger'
  // Only ids carrying the synthetic trigger suffix have a fallback.
  if (!triggerBlockId.endsWith(suffix)) return undefined

  const baseId = triggerBlockId.slice(0, -suffix.length)
  const normalizedBaseId = extractBaseBlockId(baseId)

  // Insertion order matters: prefer the exact base id over the normalized one.
  const candidates = new Set([baseId, normalizedBaseId])
  const knownIds = new Set(workflow.blocks.map((b) => b.id))
  for (const candidate of candidates) {
    if (knownIds.has(candidate)) return candidate
  }
  return undefined
}
}

View File

@@ -7,7 +7,7 @@ export interface DAGEdge {
export interface NodeMetadata {
isParallelBranch?: boolean
parallelId?: string // Which parallel this branch belongs to
parallelId?: string
branchIndex?: number
branchTotal?: number
distributionItem?: unknown
@@ -15,4 +15,7 @@ export interface NodeMetadata {
loopId?: string
isSentinel?: boolean
sentinelType?: 'start' | 'end'
isPauseResponse?: boolean
isResumeTrigger?: boolean
originalBlockId?: string
}

View File

@@ -1,17 +1,30 @@
import { createLogger } from '@/lib/logs/console/logger'
import { DEFAULTS, EDGE, isSentinelBlockType } from '@/executor/consts'
import { getBaseUrl } from '@/lib/urls/utils'
import {
BlockType,
buildResumeApiUrl,
buildResumeUiUrl,
DEFAULTS,
EDGE,
isSentinelBlockType,
} from '@/executor/consts'
import type { DAGNode } from '@/executor/dag/builder'
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
import {
generatePauseContextId,
mapNodeMetadataToPauseScopes,
} from '@/executor/pause-resume/utils.ts'
import type {
BlockHandler,
BlockLog,
BlockState,
ExecutionContext,
NormalizedBlockOutput,
} from '@/executor/types'
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedBlock } from '@/serializer/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
import type { DAGNode } from '../dag/builder'
import type { VariableResolver } from '../variables/resolver'
import type { ExecutionState } from './state'
import type { ContextExtensions } from './types'
const logger = createLogger('BlockExecutor')
@@ -20,7 +33,7 @@ export class BlockExecutor {
private blockHandlers: BlockHandler[],
private resolver: VariableResolver,
private contextExtensions: ContextExtensions,
private state?: ExecutionState
private state: BlockStateWriter
) {}
async execute(
@@ -30,7 +43,11 @@ export class BlockExecutor {
): Promise<NormalizedBlockOutput> {
const handler = this.findHandler(block)
if (!handler) {
throw new Error(`No handler found for block type: ${block.metadata?.id}`)
throw buildBlockExecutionError({
block,
context: ctx,
error: `No handler found for block type: ${block.metadata?.id ?? 'unknown'}`,
})
}
const isSentinel = isSentinelBlockType(block.metadata?.id ?? '')
@@ -45,9 +62,23 @@ export class BlockExecutor {
const startTime = Date.now()
let resolvedInputs: Record<string, any> = {}
const nodeMetadata = this.buildNodeMetadata(node)
let cleanupSelfReference: (() => void) | undefined
if (block.metadata?.id === BlockType.APPROVAL) {
cleanupSelfReference = this.preparePauseResumeSelfReference(ctx, node, block, nodeMetadata)
}
try {
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
const output = await handler.execute(ctx, block, resolvedInputs)
} finally {
cleanupSelfReference?.()
}
try {
const output = handler.executeWithNode
? await handler.executeWithNode(ctx, block, resolvedInputs, nodeMetadata)
: await handler.execute(ctx, block, resolvedInputs)
const isStreamingExecution =
output && typeof output === 'object' && 'stream' in output && 'execution' in output
@@ -65,7 +96,7 @@ export class BlockExecutor {
}
normalizedOutput = this.normalizeOutput(
streamingExec.execution.output || streamingExec.execution
streamingExec.execution.output ?? streamingExec.execution
)
} else {
normalizedOutput = this.normalizeOutput(output)
@@ -77,23 +108,20 @@ export class BlockExecutor {
blockLog.endedAt = new Date().toISOString()
blockLog.durationMs = duration
blockLog.success = true
blockLog.output = normalizedOutput
blockLog.output = this.filterOutputForLog(block, normalizedOutput)
}
ctx.blockStates.set(node.id, {
output: normalizedOutput,
executed: true,
executionTime: duration,
})
this.state.setBlockOutput(node.id, normalizedOutput, duration)
if (!isSentinel) {
this.callOnBlockComplete(ctx, node, block, resolvedInputs, normalizedOutput, duration)
const filteredOutput = this.filterOutputForLog(block, normalizedOutput)
this.callOnBlockComplete(ctx, node, block, resolvedInputs, filteredOutput, duration)
}
return normalizedOutput
} catch (error) {
const duration = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : String(error)
const errorMessage = normalizeError(error)
if (blockLog) {
blockLog.endedAt = new Date().toISOString()
@@ -106,11 +134,7 @@ export class BlockExecutor {
error: errorMessage,
}
ctx.blockStates.set(node.id, {
output: errorOutput,
executed: true,
executionTime: duration,
})
this.state.setBlockOutput(node.id, errorOutput, duration)
logger.error('Block execution failed', {
blockId: node.id,
@@ -132,7 +156,39 @@ export class BlockExecutor {
return errorOutput
}
throw error
let errorToThrow: Error | string
if (error instanceof Error) {
errorToThrow = error
} else {
errorToThrow = errorMessage
}
throw buildBlockExecutionError({
block,
error: errorToThrow,
context: ctx,
additionalInfo: {
nodeId: node.id,
executionTime: duration,
},
})
}
}
/**
 * Projects a DAG node's iteration-related metadata (loop/parallel scope)
 * into the flat shape passed to handlers alongside the node id.
 * Missing metadata simply yields all-undefined optional fields.
 */
private buildNodeMetadata(node: DAGNode): {
  nodeId: string
  loopId?: string
  parallelId?: string
  branchIndex?: number
  branchTotal?: number
} {
  const { loopId, parallelId, branchIndex, branchTotal } = node?.metadata ?? {}
  return { nodeId: node.id, loopId, parallelId, branchIndex, branchTotal }
}
@@ -155,7 +211,7 @@ export class BlockExecutor {
block: SerializedBlock,
node: DAGNode
): BlockLog {
let blockName = block.metadata?.name || blockId
let blockName = block.metadata?.name ?? blockId
let loopId: string | undefined
let parallelId: string | undefined
let iterationIndex: number | undefined
@@ -165,24 +221,12 @@ export class BlockExecutor {
blockName = `${blockName} (iteration ${node.metadata.branchIndex})`
iterationIndex = node.metadata.branchIndex
parallelId = node.metadata.parallelId
logger.debug('Added parallel iteration suffix', {
blockId,
parallelId,
branchIndex: node.metadata.branchIndex,
blockName,
})
} else if (node.metadata.isLoopNode && node.metadata.loopId && this.state) {
} else if (node.metadata.isLoopNode && node.metadata.loopId) {
loopId = node.metadata.loopId
const loopScope = this.state.getLoopScope(loopId)
const loopScope = ctx.loopExecutions?.get(loopId)
if (loopScope && loopScope.iteration !== undefined) {
blockName = `${blockName} (iteration ${loopScope.iteration})`
iterationIndex = loopScope.iteration
logger.debug('Added loop iteration suffix', {
blockId,
loopId,
iteration: loopScope.iteration,
blockName,
})
} else {
logger.warn('Loop scope not found for block', { blockId, loopId })
}
@@ -192,7 +236,7 @@ export class BlockExecutor {
return {
blockId,
blockName,
blockType: block.metadata?.id || DEFAULTS.BLOCK_TYPE,
blockType: block.metadata?.id ?? DEFAULTS.BLOCK_TYPE,
startedAt: new Date().toISOString(),
endedAt: '',
durationMs: 0,
@@ -215,12 +259,28 @@ export class BlockExecutor {
return { result: output }
}
/**
 * Scrubs a block's output before it is written to the block log.
 *
 * Only approval (pause/resume) blocks are filtered: underscore-prefixed
 * internal keys and the raw `response` payload are dropped. Every other
 * block's output is logged verbatim.
 */
private filterOutputForLog(
  block: SerializedBlock,
  output: NormalizedBlockOutput
): NormalizedBlockOutput {
  if (block.metadata?.id !== BlockType.APPROVAL) {
    return output
  }
  return Object.entries(output)
    .filter(([key]) => !key.startsWith('_') && key !== 'response')
    .reduce<NormalizedBlockOutput>((filtered, [key, value]) => {
      filtered[key] = value
      return filtered
    }, {})
}
private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void {
const blockId = node.id
const blockName = block.metadata?.name || blockId
const blockType = block.metadata?.id || DEFAULTS.BLOCK_TYPE
const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
const iterationContext = this.getIterationContext(node)
const iterationContext = this.getIterationContext(ctx, node)
if (this.contextExtensions.onBlockStart) {
this.contextExtensions.onBlockStart(blockId, blockName, blockType, iterationContext)
@@ -236,10 +296,10 @@ export class BlockExecutor {
duration: number
): void {
const blockId = node.id
const blockName = block.metadata?.name || blockId
const blockType = block.metadata?.id || DEFAULTS.BLOCK_TYPE
const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
const iterationContext = this.getIterationContext(node)
const iterationContext = this.getIterationContext(ctx, node)
if (this.contextExtensions.onBlockComplete) {
this.contextExtensions.onBlockComplete(
@@ -257,6 +317,7 @@ export class BlockExecutor {
}
private getIterationContext(
ctx: ExecutionContext,
node: DAGNode
): { iterationCurrent: number; iterationTotal: number; iterationType: SubflowType } | undefined {
if (!node?.metadata) return undefined
@@ -269,8 +330,8 @@ export class BlockExecutor {
}
}
if (node.metadata.isLoopNode && node.metadata.loopId && this.state) {
const loopScope = this.state.getLoopScope(node.metadata.loopId)
if (node.metadata.isLoopNode && node.metadata.loopId) {
const loopScope = ctx.loopExecutions?.get(node.metadata.loopId)
if (loopScope && loopScope.iteration !== undefined && loopScope.maxIterations) {
return {
iterationCurrent: loopScope.iteration,
@@ -282,4 +343,74 @@ export class BlockExecutor {
return undefined
}
/**
 * Installs a temporary self-reference state for an approval block before it
 * executes, so the block's own inputs can resolve its resume URLs
 * (`uiUrl` / `apiUrl`) while it runs.
 *
 * Returns a cleanup callback that restores the previous block state (or
 * deletes the placeholder when none existed), or undefined when no
 * placeholder was installed (block already executed, or execution/workflow
 * ids are unavailable).
 */
private preparePauseResumeSelfReference(
  ctx: ExecutionContext,
  node: DAGNode,
  block: SerializedBlock,
  nodeMetadata: {
    nodeId: string
    loopId?: string
    parallelId?: string
    branchIndex?: number
    branchTotal?: number
  }
): (() => void) | undefined {
  const blockId = node.id
  const existingState = ctx.blockStates.get(blockId)
  // An already-executed block (e.g. on resume) must keep its real output.
  if (existingState?.executed) {
    return undefined
  }

  // Resume URLs require both ids; without them there is nothing to link to.
  const executionId = ctx.executionId ?? ctx.metadata?.executionId
  const workflowId = ctx.workflowId
  if (!executionId || !workflowId) {
    return undefined
  }

  // Derive the pause context id from the block's loop scope so each
  // iteration/branch gets a distinct resume target.
  const { loopScope } = mapNodeMetadataToPauseScopes(ctx, nodeMetadata)
  const contextId = generatePauseContextId(block.id, nodeMetadata, loopScope)

  let resumeLinks: { apiUrl: string; uiUrl: string }
  try {
    const baseUrl = getBaseUrl()
    resumeLinks = {
      apiUrl: buildResumeApiUrl(baseUrl, workflowId, executionId, contextId),
      uiUrl: buildResumeUiUrl(baseUrl, workflowId, executionId),
    }
  } catch {
    // getBaseUrl can throw (e.g. outside a request context); fall back to
    // relative URLs built without a base.
    resumeLinks = {
      apiUrl: buildResumeApiUrl(undefined, workflowId, executionId, contextId),
      uiUrl: buildResumeUiUrl(undefined, workflowId, executionId),
    }
  }

  // Shallow-copy any pre-existing (non-executed) state so cleanup can
  // restore it exactly.
  let previousState: BlockState | undefined
  if (existingState) {
    previousState = { ...existingState }
  }
  const hadPrevious = existingState !== undefined

  // Placeholder exposes only the resume links; executed stays false so the
  // block is still considered pending.
  const placeholderState: BlockState = {
    output: {
      uiUrl: resumeLinks.uiUrl,
      apiUrl: resumeLinks.apiUrl,
    },
    executed: false,
    executionTime: existingState?.executionTime ?? 0,
  }

  this.state.setBlockState(blockId, placeholderState)

  // Cleanup: put back the prior state, or remove the placeholder entirely.
  return () => {
    if (hadPrevious && previousState) {
      this.state.setBlockState(blockId, previousState)
    } else {
      this.state.deleteBlockState(blockId)
    }
  }
}
}

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import { EDGE } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { DAGEdge } from '@/executor/dag/types'
import type { NormalizedBlockOutput } from '@/executor/types'
import type { DAG, DAGNode } from '../dag/builder'
import type { DAGEdge } from '../dag/types'
const logger = createLogger('EdgeManager')
@@ -17,15 +17,9 @@ export class EdgeManager {
skipBackwardsEdge = false
): string[] {
const readyNodes: string[] = []
logger.debug('Processing outgoing edges', {
nodeId: node.id,
edgeCount: node.outgoingEdges.size,
skipBackwardsEdge,
})
for (const [edgeId, edge] of node.outgoingEdges) {
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
logger.debug('Skipping backwards edge', { edgeId })
continue
}
@@ -40,14 +34,6 @@ export class EdgeManager {
this.deactivateEdgeAndDescendants(node.id, edge.target, edge.sourceHandle)
}
logger.debug('Edge not activated', {
edgeId,
sourceHandle: edge.sourceHandle,
from: node.id,
to: edge.target,
isLoopEdge,
deactivatedDescendants: !isLoopEdge,
})
continue
}
@@ -58,14 +44,8 @@ export class EdgeManager {
}
targetNode.incomingEdges.delete(node.id)
logger.debug('Removed incoming edge', {
from: node.id,
target: edge.target,
remainingIncomingEdges: targetNode.incomingEdges.size,
})
if (this.isNodeReady(targetNode)) {
logger.debug('Node ready', { nodeId: targetNode.id })
readyNodes.push(targetNode.id)
}
}
@@ -80,18 +60,9 @@ export class EdgeManager {
const activeIncomingCount = this.countActiveIncomingEdges(node)
if (activeIncomingCount > 0) {
logger.debug('Node not ready - waiting for active incoming edges', {
nodeId: node.id,
totalIncoming: node.incomingEdges.size,
activeIncoming: activeIncomingCount,
})
return false
}
logger.debug('Node ready - all remaining edges are deactivated', {
nodeId: node.id,
totalIncoming: node.incomingEdges.size,
})
return true
}
@@ -103,10 +74,6 @@ export class EdgeManager {
}
targetNode.incomingEdges.add(sourceNodeId)
logger.debug('Restored incoming edge', {
from: sourceNodeId,
to: targetNodeId,
})
}
clearDeactivatedEdges(): void {
@@ -116,33 +83,37 @@ export class EdgeManager {
private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle
if (handle?.startsWith(EDGE.CONDITION_PREFIX)) {
if (!handle) {
return true
}
if (handle.startsWith(EDGE.CONDITION_PREFIX)) {
const conditionValue = handle.substring(EDGE.CONDITION_PREFIX.length)
return output.selectedOption === conditionValue
}
if (handle?.startsWith(EDGE.ROUTER_PREFIX)) {
if (handle.startsWith(EDGE.ROUTER_PREFIX)) {
const routeId = handle.substring(EDGE.ROUTER_PREFIX.length)
return output.selectedRoute === routeId
}
if (handle === EDGE.LOOP_CONTINUE || handle === EDGE.LOOP_CONTINUE_ALT) {
return output.selectedRoute === EDGE.LOOP_CONTINUE
}
switch (handle) {
case EDGE.LOOP_CONTINUE:
case EDGE.LOOP_CONTINUE_ALT:
return output.selectedRoute === EDGE.LOOP_CONTINUE
if (handle === EDGE.LOOP_EXIT) {
return output.selectedRoute === EDGE.LOOP_EXIT
}
case EDGE.LOOP_EXIT:
return output.selectedRoute === EDGE.LOOP_EXIT
if (handle === EDGE.ERROR && !output.error) {
return false
}
case EDGE.ERROR:
return !!output.error
if (handle === EDGE.SOURCE && output.error) {
return false
}
case EDGE.SOURCE:
return !output.error
return true
default:
return true
}
}
private isBackwardsEdge(sourceHandle?: string): boolean {
@@ -165,7 +136,6 @@ export class EdgeManager {
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, sourceId)
if (!hasOtherActiveIncoming) {
logger.debug('Deactivating descendants of unreachable node', { nodeId: targetId })
for (const [_, outgoingEdge] of targetNode.outgoingEdges) {
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle)
}
@@ -218,6 +188,6 @@ export class EdgeManager {
}
private createEdgeKey(sourceId: string, targetId: string, sourceHandle?: string): string {
return `${sourceId}-${targetId}-${sourceHandle || EDGE.DEFAULT}`
return `${sourceId}-${targetId}-${sourceHandle ?? EDGE.DEFAULT}`
}
}

View File

@@ -1,9 +1,18 @@
import { createLogger } from '@/lib/logs/console/logger'
import { BlockType } from '@/executor/consts'
import type { ExecutionContext, ExecutionResult, NormalizedBlockOutput } from '@/executor/types'
import type { DAG } from '../dag/builder'
import type { NodeExecutionOrchestrator } from '../orchestrators/node'
import type { EdgeManager } from './edge-manager'
import type { DAG } from '@/executor/dag/builder'
import type { EdgeManager } from '@/executor/execution/edge-manager'
import { serializePauseSnapshot } from '@/executor/execution/snapshot-serializer'
import type { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import type {
ExecutionContext,
ExecutionResult,
NormalizedBlockOutput,
PauseMetadata,
PausePoint,
ResumeStatus,
} from '@/executor/types'
import { normalizeError } from '@/executor/utils/errors'
const logger = createLogger('ExecutionEngine')
@@ -12,32 +21,32 @@ export class ExecutionEngine {
private executing = new Set<Promise<void>>()
private queueLock = Promise.resolve()
private finalOutput: NormalizedBlockOutput = {}
private pausedBlocks: Map<string, PauseMetadata> = new Map()
private allowResumeTriggers: boolean
constructor(
private context: ExecutionContext,
private dag: DAG,
private edgeManager: EdgeManager,
private nodeOrchestrator: NodeExecutionOrchestrator,
private context: ExecutionContext
) {}
private nodeOrchestrator: NodeExecutionOrchestrator
) {
this.allowResumeTriggers = this.context.metadata.resumeFromSnapshot === true
}
async run(triggerBlockId?: string): Promise<ExecutionResult> {
const startTime = Date.now()
try {
this.initializeQueue(triggerBlockId)
logger.debug('Starting execution loop', {
initialQueueSize: this.readyQueue.length,
startNodeId: triggerBlockId,
})
while (this.hasWork()) {
await this.processQueue()
}
logger.debug('Execution loop completed', {
finalOutputKeys: Object.keys(this.finalOutput),
})
await this.waitForAllExecutions()
if (this.pausedBlocks.size > 0) {
return this.buildPausedResult(startTime)
}
const endTime = Date.now()
this.context.metadata.endTime = new Date(endTime).toISOString()
this.context.metadata.duration = endTime - startTime
@@ -53,7 +62,7 @@ export class ExecutionEngine {
this.context.metadata.endTime = new Date(endTime).toISOString()
this.context.metadata.duration = endTime - startTime
const errorMessage = error instanceof Error ? error.message : String(error)
const errorMessage = normalizeError(error)
logger.error('Execution failed', { error: errorMessage })
const executionResult: ExecutionResult = {
@@ -74,9 +83,13 @@ export class ExecutionEngine {
}
/**
 * Enqueues a node for execution, deduplicating against the ready queue.
 * Resume-trigger nodes are silently skipped unless this run was started
 * from a pause snapshot (allowResumeTriggers).
 */
private addToQueue(nodeId: string): void {
  const node = this.dag.nodes.get(nodeId)
  // Resume triggers only fire when resuming from a snapshot.
  if (node?.metadata?.isResumeTrigger && !this.allowResumeTriggers) {
    return
  }
  if (!this.readyQueue.includes(nodeId)) {
    this.readyQueue.push(nodeId)
    logger.debug('Added to queue', { nodeId, queueLength: this.readyQueue.length })
  }
}
@@ -122,6 +135,56 @@ export class ExecutionEngine {
}
private initializeQueue(triggerBlockId?: string): void {
const pendingBlocks = this.context.metadata.pendingBlocks
const remainingEdges = (this.context.metadata as any).remainingEdges
if (remainingEdges && Array.isArray(remainingEdges) && remainingEdges.length > 0) {
logger.info('Removing edges from resumed pause blocks', {
edgeCount: remainingEdges.length,
edges: remainingEdges,
})
for (const edge of remainingEdges) {
const targetNode = this.dag.nodes.get(edge.target)
if (targetNode) {
const hadEdge = targetNode.incomingEdges.has(edge.source)
targetNode.incomingEdges.delete(edge.source)
if (this.edgeManager.isNodeReady(targetNode)) {
logger.info('Node became ready after edge removal', { nodeId: targetNode.id })
this.addToQueue(targetNode.id)
}
}
}
logger.info('Edge removal complete, queued ready nodes', {
queueLength: this.readyQueue.length,
queuedNodes: this.readyQueue,
})
return
}
if (pendingBlocks && pendingBlocks.length > 0) {
logger.info('Initializing queue from pending blocks (resume mode)', {
pendingBlocks,
allowResumeTriggers: this.allowResumeTriggers,
dagNodeCount: this.dag.nodes.size,
})
for (const nodeId of pendingBlocks) {
this.addToQueue(nodeId)
}
logger.info('Pending blocks queued', {
queueLength: this.readyQueue.length,
queuedNodes: this.readyQueue,
})
this.context.metadata.pendingBlocks = []
return
}
if (triggerBlockId) {
this.addToQueue(triggerBlockId)
return
@@ -155,18 +218,17 @@ export class ExecutionEngine {
private async executeNodeAsync(nodeId: string): Promise<void> {
try {
const wasAlreadyExecuted = this.context.executedBlocks.has(nodeId)
const result = await this.nodeOrchestrator.executeNode(nodeId, this.context)
const node = this.dag.nodes.get(nodeId)
const result = await this.nodeOrchestrator.executeNode(this.context, nodeId)
if (!wasAlreadyExecuted) {
await this.withQueueLock(async () => {
await this.handleNodeCompletion(nodeId, result.output, result.isFinalOutput)
})
} else {
logger.debug('Node was already executed, skipping edge processing to avoid loops', {
nodeId,
})
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
const errorMessage = normalizeError(error)
logger.error('Node execution failed', { nodeId, error: errorMessage })
throw error
}
@@ -183,19 +245,73 @@ export class ExecutionEngine {
return
}
await this.nodeOrchestrator.handleNodeCompletion(nodeId, output, this.context)
if (output._pauseMetadata) {
const pauseMetadata = output._pauseMetadata
this.pausedBlocks.set(pauseMetadata.contextId, pauseMetadata)
this.context.metadata.status = 'paused'
this.context.metadata.pausePoints = Array.from(this.pausedBlocks.keys())
return
}
await this.nodeOrchestrator.handleNodeCompletion(this.context, nodeId, output)
if (isFinalOutput) {
this.finalOutput = output
}
const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false)
this.addMultipleToQueue(readyNodes)
logger.debug('Node completion handled', {
logger.info('Processing outgoing edges', {
nodeId,
outgoingEdgesCount: node.outgoingEdges.size,
readyNodesCount: readyNodes.length,
queueSize: this.readyQueue.length,
readyNodes,
})
this.addMultipleToQueue(readyNodes)
}
/**
 * Assembles the ExecutionResult for a run that ended in a paused state:
 * stamps timing/status on the shared metadata, serializes a resumable
 * snapshot seed, and emits one pause point per paused block.
 */
private buildPausedResult(startTime: number): ExecutionResult {
  const finishedAt = Date.now()
  const { metadata } = this.context
  metadata.endTime = new Date(finishedAt).toISOString()
  metadata.duration = finishedAt - startTime
  metadata.status = 'paused'

  // Capture everything needed to rebuild this execution on resume.
  const snapshotSeed = serializePauseSnapshot(this.context, [], this.dag)

  const pausePoints: PausePoint[] = [...this.pausedBlocks.values()].map((pause) => ({
    contextId: pause.contextId,
    blockId: pause.blockId,
    response: pause.response,
    registeredAt: pause.timestamp,
    resumeStatus: 'paused' as ResumeStatus,
    snapshotReady: true,
    parallelScope: pause.parallelScope,
    loopScope: pause.loopScope,
    resumeLinks: pause.resumeLinks,
  }))

  return {
    success: true,
    output: this.collectPauseResponses(),
    logs: this.context.blockLogs,
    metadata,
    status: 'paused',
    pausePoints,
    snapshotSeed,
  }
}
/**
 * Collapses the paused blocks' responses into a single output: a lone pause
 * surfaces its response directly, while multiple pauses are wrapped in an
 * aggregate with their count.
 */
private collectPauseResponses(): NormalizedBlockOutput {
  const responses = [...this.pausedBlocks.values()].map(({ response }) => response)
  return responses.length === 1
    ? responses[0]
    : { pausedBlocks: responses, pauseCount: responses.length }
}
}

View File

@@ -1,23 +1,24 @@
import { createLogger } from '@/lib/logs/console/logger'
import { StartBlockPath } from '@/lib/workflows/triggers'
import type { BlockOutput } from '@/blocks/types'
import { DAGBuilder } from '@/executor/dag/builder'
import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager'
import { ExecutionEngine } from '@/executor/execution/engine'
import { ExecutionState } from '@/executor/execution/state'
import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
import { createBlockHandlers } from '@/executor/handlers/registry'
import type { ExecutionContext, ExecutionResult } from '@/executor/types'
import { LoopOrchestrator } from '@/executor/orchestrators/loop'
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
import {
buildResolutionFromBlock,
buildStartBlockOutput,
resolveExecutorStartBlock,
} from '@/executor/utils/start-block'
import { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedWorkflow } from '@/serializer/types'
import { DAGBuilder } from '../dag/builder'
import { LoopOrchestrator } from '../orchestrators/loop'
import { NodeExecutionOrchestrator } from '../orchestrators/node'
import { ParallelOrchestrator } from '../orchestrators/parallel'
import { VariableResolver } from '../variables/resolver'
import { BlockExecutor } from './block-executor'
import { EdgeManager } from './edge-manager'
import { ExecutionEngine } from './engine'
import { ExecutionState } from './state'
import type { ContextExtensions, WorkflowInput } from './types'
const logger = createLogger('DAGExecutor')
@@ -32,7 +33,6 @@ export interface DAGExecutorOptions {
export class DAGExecutor {
private workflow: SerializedWorkflow
private initialBlockStates: Record<string, BlockOutput>
private environmentVariables: Record<string, string>
private workflowInput: WorkflowInput
private workflowVariables: Record<string, unknown>
@@ -42,19 +42,25 @@ export class DAGExecutor {
constructor(options: DAGExecutorOptions) {
this.workflow = options.workflow
this.initialBlockStates = options.currentBlockStates || {}
this.environmentVariables = options.envVarValues || {}
this.workflowInput = options.workflowInput || {}
this.workflowVariables = options.workflowVariables || {}
this.contextExtensions = options.contextExtensions || {}
this.environmentVariables = options.envVarValues ?? {}
this.workflowInput = options.workflowInput ?? {}
this.workflowVariables = options.workflowVariables ?? {}
this.contextExtensions = options.contextExtensions ?? {}
this.dagBuilder = new DAGBuilder()
}
async execute(workflowId: string, triggerBlockId?: string): Promise<ExecutionResult> {
const dag = this.dagBuilder.build(this.workflow, triggerBlockId)
const context = this.createExecutionContext(workflowId, triggerBlockId)
// Create state with shared references to context's maps/sets for single source of truth
const state = new ExecutionState(context.blockStates, context.executedBlocks)
const savedIncomingEdges = this.contextExtensions.dagIncomingEdges
const dag = this.dagBuilder.build(this.workflow, triggerBlockId, savedIncomingEdges)
const { context, state } = this.createExecutionContext(workflowId, triggerBlockId)
// Link cancellation flag to context
Object.defineProperty(context, 'isCancelled', {
get: () => this.isCancelled,
enumerable: true,
configurable: true,
})
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
@@ -68,7 +74,7 @@ export class DAGExecutor {
loopOrchestrator,
parallelOrchestrator
)
const engine = new ExecutionEngine(dag, edgeManager, nodeOrchestrator, context)
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
return await engine.run(triggerBlockId)
}
@@ -77,14 +83,14 @@ export class DAGExecutor {
}
async continueExecution(
pendingBlocks: string[],
_pendingBlocks: string[],
context: ExecutionContext
): Promise<ExecutionResult> {
logger.warn('Debug mode (continueExecution) is not yet implemented in the refactored executor')
return {
success: false,
output: {},
logs: context.blockLogs || [],
logs: context.blockLogs ?? [],
error: 'Debug mode is not yet supported in the refactored executor',
metadata: {
duration: 0,
@@ -93,44 +99,118 @@ export class DAGExecutor {
}
}
private createExecutionContext(workflowId: string, triggerBlockId?: string): ExecutionContext {
private createExecutionContext(
workflowId: string,
triggerBlockId?: string
): { context: ExecutionContext; state: ExecutionState } {
const snapshotState = this.contextExtensions.snapshotState
const blockStates = snapshotState?.blockStates
? new Map(Object.entries(snapshotState.blockStates))
: new Map<string, BlockState>()
const executedBlocks = snapshotState?.executedBlocks
? new Set(snapshotState.executedBlocks)
: new Set<string>()
const state = new ExecutionState(blockStates, executedBlocks)
const context: ExecutionContext = {
workflowId,
workspaceId: this.contextExtensions.workspaceId,
executionId: this.contextExtensions.executionId,
userId: this.contextExtensions.userId,
isDeployedContext: this.contextExtensions.isDeployedContext,
blockStates: new Map(),
blockLogs: [],
blockStates: state.getBlockStates(),
blockLogs: snapshotState?.blockLogs ?? [],
metadata: {
startTime: new Date().toISOString(),
duration: 0,
useDraftState: this.contextExtensions.isDeployedContext !== true,
},
environmentVariables: this.environmentVariables,
workflowVariables: this.workflowVariables,
decisions: {
router: new Map(),
condition: new Map(),
router: snapshotState?.decisions?.router
? new Map(Object.entries(snapshotState.decisions.router))
: new Map(),
condition: snapshotState?.decisions?.condition
? new Map(Object.entries(snapshotState.decisions.condition))
: new Map(),
},
loopIterations: new Map(),
loopItems: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: snapshotState?.completedLoops
? new Set(snapshotState.completedLoops)
: new Set(),
loopExecutions: snapshotState?.loopExecutions
? new Map(
Object.entries(snapshotState.loopExecutions).map(([loopId, scope]) => [
loopId,
{
...scope,
currentIterationOutputs: scope.currentIterationOutputs
? new Map(Object.entries(scope.currentIterationOutputs))
: new Map(),
},
])
)
: new Map(),
parallelExecutions: snapshotState?.parallelExecutions
? new Map(
Object.entries(snapshotState.parallelExecutions).map(([parallelId, scope]) => [
parallelId,
{
...scope,
branchOutputs: scope.branchOutputs
? new Map(Object.entries(scope.branchOutputs).map(([k, v]) => [Number(k), v]))
: new Map(),
},
])
)
: new Map(),
executedBlocks: state.getExecutedBlocks(),
activeExecutionPath: snapshotState?.activeExecutionPath
? new Set(snapshotState.activeExecutionPath)
: new Set(),
workflow: this.workflow,
stream: this.contextExtensions.stream || false,
selectedOutputs: this.contextExtensions.selectedOutputs || [],
edges: this.contextExtensions.edges || [],
stream: this.contextExtensions.stream ?? false,
selectedOutputs: this.contextExtensions.selectedOutputs ?? [],
edges: this.contextExtensions.edges ?? [],
onStream: this.contextExtensions.onStream,
onBlockStart: this.contextExtensions.onBlockStart,
onBlockComplete: this.contextExtensions.onBlockComplete,
}
this.initializeStarterBlock(context, triggerBlockId)
return context
if (this.contextExtensions.resumeFromSnapshot) {
context.metadata.resumeFromSnapshot = true
logger.info('Resume from snapshot enabled', {
resumePendingQueue: this.contextExtensions.resumePendingQueue,
remainingEdges: this.contextExtensions.remainingEdges,
triggerBlockId,
})
}
if (this.contextExtensions.remainingEdges) {
;(context.metadata as any).remainingEdges = this.contextExtensions.remainingEdges
logger.info('Set remaining edges for resume', {
edgeCount: this.contextExtensions.remainingEdges.length,
})
}
if (this.contextExtensions.resumePendingQueue?.length) {
context.metadata.pendingBlocks = [...this.contextExtensions.resumePendingQueue]
logger.info('Set pending blocks from resume queue', {
pendingBlocks: context.metadata.pendingBlocks,
skipStarterBlockInit: true,
})
} else {
this.initializeStarterBlock(context, state, triggerBlockId)
}
return { context, state }
}
private initializeStarterBlock(context: ExecutionContext, triggerBlockId?: string): void {
private initializeStarterBlock(
context: ExecutionContext,
state: ExecutionState,
triggerBlockId?: string
): void {
let startResolution: ReturnType<typeof resolveExecutorStartBlock> | null = null
if (triggerBlockId) {
@@ -145,14 +225,10 @@ export class DAGExecutor {
startResolution = buildResolutionFromBlock(triggerBlock)
if (!startResolution) {
logger.debug('Creating generic resolution for trigger block', {
triggerBlockId,
blockType: triggerBlock.metadata?.id,
})
startResolution = {
blockId: triggerBlock.id,
block: triggerBlock,
path: 'split_manual' as any,
path: StartBlockPath.SPLIT_MANUAL,
}
}
} else {
@@ -167,21 +243,20 @@ export class DAGExecutor {
}
}
if (state.getBlockStates().has(startResolution.block.id)) {
return
}
const blockOutput = buildStartBlockOutput({
resolution: startResolution,
workflowInput: this.workflowInput,
isDeployedExecution: this.contextExtensions?.isDeployedContext === true,
})
context.blockStates.set(startResolution.block.id, {
state.setBlockState(startResolution.block.id, {
output: blockOutput,
executed: true,
executed: false,
executionTime: 0,
})
logger.debug('Initialized start block', {
blockId: startResolution.block.id,
blockType: startResolution.block.metadata?.id,
})
}
}

View File

@@ -0,0 +1,129 @@
import type { DAG } from '@/executor/dag/builder'
import type { SerializableExecutionState } from '@/executor/execution/snapshot'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionContext, ExecutionMetadata, SerializedSnapshot } from '@/executor/types'
/** Converts an optional Map into a plain object; passes `undefined` through unchanged. */
function mapFromEntries<T>(map?: Map<string, T>): Record<string, T> | undefined {
  return map ? Object.fromEntries(map) : undefined
}
/** Converts an optional Set into an array; passes `undefined` through unchanged. */
function setToArray<T>(set?: Set<T>): T[] | undefined {
  return set ? [...set] : undefined
}
/**
 * Serializes loop execution scopes for snapshot storage.
 *
 * Each scope's `currentIterationOutputs` Map is flattened into a plain object
 * (JSON-safe); scopes that already carry a plain object (or nothing) are kept
 * as-is, defaulting to an empty object.
 */
function serializeLoopExecutions(
  loopExecutions?: Map<string, any>
): Record<string, any> | undefined {
  if (!loopExecutions) return undefined
  const serialized: Record<string, any> = {}
  loopExecutions.forEach((scope, loopId) => {
    const outputs =
      scope.currentIterationOutputs instanceof Map
        ? Object.fromEntries(scope.currentIterationOutputs)
        : (scope.currentIterationOutputs ?? {})
    serialized[loopId] = { ...scope, currentIterationOutputs: outputs }
  })
  return serialized
}
/**
 * Serializes parallel execution scopes for snapshot storage.
 *
 * Each scope's `branchOutputs` Map is flattened into a plain object (JSON-safe);
 * scopes that already carry a plain object (or nothing) are kept as-is,
 * defaulting to an empty object.
 */
function serializeParallelExecutions(
  parallelExecutions?: Map<string, any>
): Record<string, any> | undefined {
  if (!parallelExecutions) return undefined
  const serialized: Record<string, any> = {}
  parallelExecutions.forEach((scope, parallelId) => {
    const outputs =
      scope.branchOutputs instanceof Map
        ? Object.fromEntries(scope.branchOutputs)
        : (scope.branchOutputs ?? {})
    serialized[parallelId] = { ...scope, branchOutputs: outputs }
  })
  return serialized
}
/**
 * Captures the live execution context as a serializable pause snapshot so a
 * paused run can later be restored and resumed from `triggerBlockIds`.
 *
 * @param context - The in-flight execution context whose Maps/Sets are flattened.
 * @param triggerBlockIds - Block ids that should re-enter the queue on resume;
 *   the first one is recorded as the snapshot's trigger block.
 * @param dag - Optional DAG whose per-node incoming edges are persisted so the
 *   resumed run can rebuild the same graph structure.
 * @returns The snapshot JSON plus the resume trigger ids.
 */
export function serializePauseSnapshot(
  context: ExecutionContext,
  triggerBlockIds: string[],
  dag?: DAG
): SerializedSnapshot {
  const meta = context.metadata as ExecutionMetadata | undefined

  // Prefer the explicit flag on metadata; otherwise derive it from whether
  // this execution ran against the deployed workflow state.
  let useDraftState: boolean
  if (meta?.useDraftState !== undefined) {
    useDraftState = meta.useDraftState
  } else {
    useDraftState = context.isDeployedContext !== true
  }

  // Persist each DAG node's incoming-edge set so resume can rebuild the graph.
  let dagIncomingEdges: Record<string, string[]> | undefined
  if (dag) {
    dagIncomingEdges = {}
    for (const [nodeId, node] of dag.nodes) {
      dagIncomingEdges[nodeId] = Array.from(node.incomingEdges)
    }
  }

  const state: SerializableExecutionState = {
    blockStates: Object.fromEntries(context.blockStates),
    executedBlocks: [...context.executedBlocks],
    blockLogs: context.blockLogs,
    decisions: {
      router: Object.fromEntries(context.decisions.router),
      condition: Object.fromEntries(context.decisions.condition),
    },
    completedLoops: [...context.completedLoops],
    loopExecutions: serializeLoopExecutions(context.loopExecutions),
    parallelExecutions: serializeParallelExecutions(context.parallelExecutions),
    parallelBlockMapping: mapFromEntries(context.parallelBlockMapping),
    activeExecutionPath: [...context.activeExecutionPath],
    pendingQueue: triggerBlockIds,
    dagIncomingEdges,
  }

  const executionMetadata = {
    // Fall back through execution/workflow ids so the record is never unkeyed.
    requestId:
      (context.metadata as any)?.requestId ??
      context.executionId ??
      context.workflowId ??
      'unknown',
    executionId: context.executionId ?? 'unknown',
    workflowId: context.workflowId,
    workspaceId: context.workspaceId,
    userId: (context.metadata as any)?.userId ?? '',
    triggerType: (context.metadata as any)?.triggerType ?? 'manual',
    triggerBlockId: triggerBlockIds[0],
    useDraftState,
    startTime: context.metadata.startTime ?? new Date().toISOString(),
  }

  const snapshot = new ExecutionSnapshot(
    executionMetadata,
    context.workflow,
    {},
    context.environmentVariables ?? {},
    context.workflowVariables ?? {},
    context.selectedOutputs ?? [],
    state
  )

  return { snapshot: snapshot.toJSON(), triggerIds: triggerBlockIds }
}

View File

@@ -11,6 +11,8 @@ export interface ExecutionMetadata {
triggerBlockId?: string
useDraftState: boolean
startTime: string
pendingBlocks?: string[]
resumeFromSnapshot?: boolean
}
export interface ExecutionCallbacks {
@@ -33,8 +35,6 @@ export interface SerializableExecutionState {
router: Record<string, string>
condition: Record<string, string>
}
loopIterations: Record<string, number>
loopItems: Record<string, any>
completedLoops: string[]
loopExecutions?: Record<string, any>
parallelExecutions?: Record<string, any>
@@ -42,6 +42,8 @@ export interface SerializableExecutionState {
activeExecutionPath: string[]
pendingQueue?: string[]
remainingEdges?: Edge[]
dagIncomingEdges?: Record<string, string[]>
completedPauseContexts?: string[]
}
export class ExecutionSnapshot {
@@ -80,19 +82,3 @@ export class ExecutionSnapshot {
)
}
}
// TODO: Implement pause/resume functionality
//
// Future implementation should include:
// 1. executor.pause() - Captures current state mid-execution
// - Serialize ExecutionContext (blockStates, decisions, loops, etc) to state property
// - Save snapshot.toJSON() to database
// 2. executor.resume(snapshot) - Reconstructs execution from saved state
// - Load snapshot from database
// - Restore ExecutionContext from state property
// - Continue execution from pendingQueue
// 3. API endpoints:
// - POST /api/executions/[id]/pause
// - POST /api/executions/[id]/resume
// 4. Database schema:
// - execution_snapshots table with snapshot JSON column

View File

@@ -1,4 +1,9 @@
import type { NormalizedBlockOutput } from '@/executor/types'
import type { BlockStateController } from '@/executor/execution/types'
import type { BlockState, NormalizedBlockOutput } from '@/executor/types'
/**
 * Strips iteration-specific suffixes from a block id so per-iteration node ids
 * (e.g. "block_loop3", "node12") can be matched against their base block id.
 *
 * The `_loop<N>` suffix must be removed BEFORE the digit pass: the previous
 * order stripped all digits first, which made `/_loop\d+/` unmatchable dead
 * code and left a dangling "_loop" suffix ("block_loop3" -> "block_loop"
 * instead of "block"), breaking the normalized-id comparison in
 * `getBlockOutput`.
 */
function normalizeLookupId(id: string): string {
  return id.replace(/_loop\d+/g, '').replace(/\d+/gu, '')
}
export interface LoopScope {
iteration: number
currentIterationOutputs: Map<string, NormalizedBlockOutput>
@@ -18,53 +23,77 @@ export interface ParallelScope {
totalExpectedNodes: number
}
export class ExecutionState {
// Shared references with ExecutionContext for single source of truth
readonly blockStates: Map<
string,
{ output: NormalizedBlockOutput; executed: boolean; executionTime: number }
>
readonly executedBlocks: Set<string>
readonly loopScopes = new Map<string, LoopScope>()
readonly parallelScopes = new Map<string, ParallelScope>()
export class ExecutionState implements BlockStateController {
private readonly blockStates: Map<string, BlockState>
private readonly executedBlocks: Set<string>
constructor(
blockStates: Map<
string,
{ output: NormalizedBlockOutput; executed: boolean; executionTime: number }
>,
executedBlocks: Set<string>
) {
this.blockStates = blockStates
this.executedBlocks = executedBlocks
constructor(blockStates?: Map<string, BlockState>, executedBlocks?: Set<string>) {
this.blockStates = blockStates ?? new Map()
this.executedBlocks = executedBlocks ?? new Set()
}
getBlockOutput(blockId: string): NormalizedBlockOutput | undefined {
return this.blockStates.get(blockId)?.output
getBlockStates(): ReadonlyMap<string, BlockState> {
return this.blockStates
}
setBlockOutput(blockId: string, output: NormalizedBlockOutput): void {
this.blockStates.set(blockId, { output, executed: true, executionTime: 0 })
getExecutedBlocks(): ReadonlySet<string> {
return this.executedBlocks
}
getBlockOutput(blockId: string, currentNodeId?: string): NormalizedBlockOutput | undefined {
const direct = this.blockStates.get(blockId)?.output
if (direct !== undefined) {
return direct
}
const normalizedId = normalizeLookupId(blockId)
if (normalizedId !== blockId) {
return undefined
}
if (currentNodeId) {
const currentSuffix = currentNodeId.replace(normalizedId, '').match(/₍\d+₎/g)?.[0] ?? ''
const loopSuffix = currentNodeId.match(/_loop\d+/)?.[0] ?? ''
const withSuffix = `${blockId}${currentSuffix}${loopSuffix}`
const suffixedOutput = this.blockStates.get(withSuffix)?.output
if (suffixedOutput !== undefined) {
return suffixedOutput
}
}
for (const [storedId, state] of this.blockStates.entries()) {
if (normalizeLookupId(storedId) === blockId) {
return state.output
}
}
return undefined
}
setBlockOutput(blockId: string, output: NormalizedBlockOutput, executionTime = 0): void {
this.blockStates.set(blockId, { output, executed: true, executionTime })
this.executedBlocks.add(blockId)
}
setBlockState(blockId: string, state: BlockState): void {
this.blockStates.set(blockId, state)
if (state.executed) {
this.executedBlocks.add(blockId)
} else {
this.executedBlocks.delete(blockId)
}
}
deleteBlockState(blockId: string): void {
this.blockStates.delete(blockId)
this.executedBlocks.delete(blockId)
}
unmarkExecuted(blockId: string): void {
this.executedBlocks.delete(blockId)
}
hasExecuted(blockId: string): boolean {
return this.executedBlocks.has(blockId)
}
getLoopScope(loopId: string): LoopScope | undefined {
return this.loopScopes.get(loopId)
}
setLoopScope(loopId: string, scope: LoopScope): void {
this.loopScopes.set(loopId, scope)
}
getParallelScope(parallelId: string): ParallelScope | undefined {
return this.parallelScopes.get(parallelId)
}
setParallelScope(parallelId: string, scope: ParallelScope): void {
this.parallelScopes.set(parallelId, scope)
}
}

View File

@@ -1,4 +1,4 @@
import type { NormalizedBlockOutput } from '@/executor/types'
import type { BlockState, NormalizedBlockOutput } from '@/executor/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
export interface ContextExtensions {
@@ -10,6 +10,16 @@ export interface ContextExtensions {
edges?: Array<{ source: string; target: string }>
isDeployedContext?: boolean
isChildExecution?: boolean
resumeFromSnapshot?: boolean
resumePendingQueue?: string[]
remainingEdges?: Array<{
source: string
target: string
sourceHandle?: string
targetHandle?: string
}>
dagIncomingEdges?: Record<string, string[]>
snapshotState?: import('@/executor/execution/snapshot').SerializableExecutionState
onStream?: (streamingExecution: unknown) => Promise<void>
onBlockStart?: (
blockId: string,
@@ -37,3 +47,17 @@ export interface ContextExtensions {
export interface WorkflowInput {
[key: string]: unknown
}
/** Read-only view over per-block execution results. */
export interface BlockStateReader {
  // currentNodeId lets lookups resolve iteration-suffixed ids — presumably for
  // blocks inside loops/parallels; confirm against ExecutionState.getBlockOutput.
  getBlockOutput(blockId: string, currentNodeId?: string): NormalizedBlockOutput | undefined
  // True if the block has already completed in this execution.
  hasExecuted(blockId: string): boolean
}
/** Mutating operations on per-block execution state. */
export interface BlockStateWriter {
  // Records a block's output and marks it executed; executionTime defaults to 0.
  setBlockOutput(blockId: string, output: NormalizedBlockOutput, executionTime?: number): void
  // Replaces the full stored state (output / executed flag / timing) for a block.
  setBlockState(blockId: string, state: BlockState): void
  // Removes a block's stored state entirely.
  deleteBlockState(blockId: string): void
  // Clears only the executed flag, leaving any stored output in place.
  unmarkExecuted(blockId: string): void
}
/** Combined read/write access to block execution state. */
export type BlockStateController = BlockStateReader & BlockStateWriter

View File

@@ -109,8 +109,7 @@ describe('AgentBlockHandler', () => {
metadata: { startTime: new Date().toISOString(), duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),

View File

@@ -34,8 +34,6 @@ export class AgentBlockHandler implements BlockHandler {
block: SerializedBlock,
inputs: AgentInputs
): Promise<BlockOutput | StreamingExecution> {
logger.info(`Executing agent block: ${block.id}`)
const responseFormat = this.parseResponseFormat(inputs.responseFormat)
const model = inputs.model || AGENT.DEFAULT_MODEL
const providerId = getProviderFromModel(model)
@@ -76,9 +74,6 @@ export class AgentBlockHandler implements BlockHandler {
const trimmedValue = responseFormat.trim()
if (trimmedValue.startsWith('<') && trimmedValue.includes('>')) {
logger.info('Response format contains variable reference:', {
value: trimmedValue,
})
return undefined
}
@@ -163,10 +158,8 @@ export class AgentBlockHandler implements BlockHandler {
if (tool.code) {
base.executeFunction = async (callParams: Record<string, any>) => {
// Merge user-provided parameters with LLM-generated parameters
const mergedParams = mergeToolParameters(userProvidedParams, callParams)
// Collect block outputs for tag resolution
const { blockData, blockNameMapping } = collectBlockData(ctx)
const result = await executeTool(
@@ -257,8 +250,6 @@ export class AgentBlockHandler implements BlockHandler {
params: userProvidedParams,
usageControl: tool.usageControl || 'auto',
executeFunction: async (callParams: Record<string, any>) => {
logger.info(`Executing MCP tool ${toolName} on server ${serverId}`)
const headers = await buildAuthHeaders()
const execUrl = buildAPIUrl('/api/mcp/tools/execute')
@@ -565,8 +556,6 @@ export class AgentBlockHandler implements BlockHandler {
responseFormat: any,
providerStartTime: number
) {
logger.info('Using HTTP provider request (browser environment)')
const url = buildAPIUrl('/api/providers')
const response = await fetch(url.toString(), {
method: 'POST',
@@ -589,10 +578,8 @@ export class AgentBlockHandler implements BlockHandler {
'HTTP response'
)
// Check if this is a streaming response
const contentType = response.headers.get('Content-Type')
if (contentType?.includes(HTTP.CONTENT_TYPE.EVENT_STREAM)) {
logger.info('Received streaming response')
return this.handleStreamingResponse(response, block)
}
@@ -664,15 +651,6 @@ export class AgentBlockHandler implements BlockHandler {
: response && typeof response === 'object' && 'stream' in response
? 'streaming-execution'
: 'json'
logger.info('Provider request completed successfully', {
provider,
model,
workflowId: ctx.workflowId,
blockId: block.id,
executionTime,
responseType,
})
}
private handleExecutionError(
@@ -745,7 +723,6 @@ export class AgentBlockHandler implements BlockHandler {
block: SerializedBlock
): StreamingExecution {
const streamingExec = response as StreamingExecution
logger.info(`Received StreamingExecution for block ${block.id}`)
if (streamingExec.execution.output) {
const execution = streamingExec.execution as any
@@ -786,16 +763,11 @@ export class AgentBlockHandler implements BlockHandler {
try {
const extractedJson = JSON.parse(content.trim())
logger.info('Successfully parsed structured response content')
return {
...extractedJson,
...this.createResponseMetadata(result),
}
} catch (error) {
logger.info('JSON parsing failed', {
error: error instanceof Error ? error.message : 'Unknown error',
})
logger.error('LLM did not adhere to structured response format:', {
content: content.substring(0, 200) + (content.length > 200 ? '...' : ''),
responseFormat: responseFormat,

View File

@@ -36,8 +36,7 @@ describe('ApiBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: new Set(),

View File

@@ -1,7 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, HTTP } from '@/executor/consts'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { stringifyJSON } from '@/executor/utils/json'
import type { SerializedBlock } from '@/serializer/types'
import { executeTool } from '@/tools'
import { getTool } from '@/tools/utils'
@@ -64,24 +63,13 @@ export class ApiBlockHandler implements BlockHandler {
const trimmedBody = processedInputs.body.trim()
if (trimmedBody.startsWith('{') || trimmedBody.startsWith('[')) {
processedInputs.body = JSON.parse(trimmedBody)
logger.info(
'[ApiBlockHandler] Parsed JSON body:',
stringifyJSON(processedInputs.body)
)
}
} catch (e) {
logger.info('[ApiBlockHandler] Failed to parse body as JSON, using as string:', e)
}
} catch (e) {}
} else if (processedInputs.body === null) {
processedInputs.body = undefined
}
}
logger.info(
'[ApiBlockHandler] Final processed request body:',
stringifyJSON(processedInputs.body)
)
const result = await executeTool(
block.config.tool,
{

View File

@@ -101,8 +101,7 @@ describe('ConditionBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {}, // Now set the context's env vars
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set([mockSourceBlock.id]),
activeExecutionPath: new Set(),
workflow: mockWorkflow as SerializedWorkflow,
@@ -333,13 +332,18 @@ describe('ConditionBlockHandler', () => {
it('should handle missing source block output gracefully', async () => {
const conditions = [{ id: 'cond1', title: 'if', value: 'true' }]
const inputs = { conditions: JSON.stringify(conditions) }
mockContext.blockStates.delete(mockSourceBlock.id)
// Create a new context with empty blockStates instead of trying to delete from readonly map
const contextWithoutSource = {
...mockContext,
blockStates: new Map<string, BlockState>(),
}
mockResolver.resolveVariableReferences.mockReturnValue('true')
mockResolver.resolveBlockReferences.mockReturnValue('true')
mockResolver.resolveEnvVariables.mockReturnValue('true')
const result = await handler.execute(mockContext, mockBlock, inputs)
const result = await handler.execute(contextWithoutSource, mockBlock, inputs)
expect(result).toHaveProperty('conditionResult', true)
expect(result).toHaveProperty('selectedConditionId', 'cond1')
@@ -393,15 +397,13 @@ describe('ConditionBlockHandler', () => {
)
})
it('should use loop context during evaluation if available', async () => {
it('falls back to else path when loop context data is unavailable', async () => {
const conditions = [
{ id: 'cond1', title: 'if', value: 'context.item === "apple"' },
{ id: 'else1', title: 'else', value: '' },
]
const inputs = { conditions: JSON.stringify(conditions) }
mockContext.loopItems.set(mockBlock.id, { item: 'apple' })
// Mock the full resolution pipeline
mockResolver.resolveVariableReferences.mockReturnValue('context.item === "apple"')
mockResolver.resolveBlockReferences.mockReturnValue('context.item === "apple"')
@@ -409,7 +411,7 @@ describe('ConditionBlockHandler', () => {
const result = await handler.execute(mockContext, mockBlock, inputs)
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('cond1')
expect((result as any).selectedConditionId).toBe('cond1')
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('else1')
expect((result as any).selectedConditionId).toBe('else1')
})
})

View File

@@ -17,9 +17,7 @@ export async function evaluateConditionExpression(
resolver: any,
providedEvalContext?: Record<string, any>
): Promise<boolean> {
const evalContext = providedEvalContext || {
...(ctx.loopItems.get(block.id) || {}),
}
const evalContext = providedEvalContext || {}
let resolvedConditionValue = conditionExpression
try {
@@ -27,9 +25,6 @@ export async function evaluateConditionExpression(
const resolvedVars = resolver.resolveVariableReferences(conditionExpression, block)
const resolvedRefs = resolver.resolveBlockReferences(resolvedVars, ctx, block)
resolvedConditionValue = resolver.resolveEnvVariables(resolvedRefs)
logger.info(
`Resolved condition: from "${conditionExpression}" to "${resolvedConditionValue}"`
)
}
} catch (resolveError: any) {
logger.error(`Failed to resolve references in condition: ${resolveError.message}`, {
@@ -40,12 +35,10 @@ export async function evaluateConditionExpression(
}
try {
logger.info(`Evaluating resolved condition: "${resolvedConditionValue}"`, { evalContext })
const conditionMet = new Function(
'context',
`with(context) { return ${resolvedConditionValue} }`
)(evalContext)
logger.info(`Condition evaluated to: ${conditionMet}`)
return Boolean(conditionMet)
} catch (evalError: any) {
logger.error(`Failed to evaluate condition: ${evalError.message}`, {
@@ -78,10 +71,6 @@ export class ConditionBlockHandler implements BlockHandler {
block: SerializedBlock,
inputs: Record<string, any>
): Promise<BlockOutput> {
logger.info(`Executing condition block: ${block.id}`, {
rawConditionsInput: inputs.conditions,
})
const conditions = this.parseConditions(inputs.conditions)
const sourceBlockId = ctx.workflow?.connections.find((conn) => conn.target === block.id)?.source
@@ -103,10 +92,6 @@ export class ConditionBlockHandler implements BlockHandler {
throw new Error(`Target block ${selectedConnection?.target} not found`)
}
logger.info(
`Condition block ${block.id} selected path: ${selectedCondition.title} (${selectedCondition.id}) -> ${targetBlock.metadata?.name || targetBlock.id}`
)
const decisionKey = ctx.currentVirtualBlockId || block.id
ctx.decisions.condition.set(decisionKey, selectedCondition.id)
@@ -126,7 +111,6 @@ export class ConditionBlockHandler implements BlockHandler {
private parseConditions(input: any): Array<{ id: string; title: string; value: string }> {
try {
const conditions = Array.isArray(input) ? input : JSON.parse(input || '[]')
logger.info('Parsed conditions:', conditions)
return conditions
} catch (error: any) {
logger.error('Failed to parse conditions:', { input, error })
@@ -139,9 +123,7 @@ export class ConditionBlockHandler implements BlockHandler {
blockId: string,
sourceBlockId?: string
): Record<string, any> {
let evalContext: Record<string, any> = {
...(ctx.loopItems.get(blockId) || {}),
}
let evalContext: Record<string, any> = {}
if (sourceBlockId) {
const sourceOutput = ctx.blockStates.get(sourceBlockId)?.output
@@ -153,7 +135,6 @@ export class ConditionBlockHandler implements BlockHandler {
}
}
logger.info('Base eval context:', evalContext)
return evalContext
}
@@ -185,7 +166,6 @@ export class ConditionBlockHandler implements BlockHandler {
this.resolver,
evalContext
)
logger.info(`Condition "${condition.title}" (${condition.id}) met: ${conditionMet}`)
const connection = this.findConnectionForCondition(outgoingConnections, condition.id)

View File

@@ -40,8 +40,7 @@ describe('EvaluatorBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),

View File

@@ -30,26 +30,28 @@ export class EvaluatorBlockHandler implements BlockHandler {
const processedContent = this.processContent(inputs.content)
// Parse system prompt object with robust error handling
let systemPromptObj: { systemPrompt: string; responseFormat: any } = {
systemPrompt: '',
responseFormat: null,
}
logger.info('Inputs for evaluator:', inputs)
const metrics = Array.isArray(inputs.metrics) ? inputs.metrics : []
let metrics: any[]
if (Array.isArray(inputs.metrics)) {
metrics = inputs.metrics
} else {
metrics = []
}
logger.info('Metrics for evaluator:', metrics)
const metricDescriptions = metrics
.filter((m: any) => m?.name && m.range) // Filter out invalid/incomplete metrics
.filter((m: any) => m?.name && m.range)
.map((m: any) => `"${m.name}" (${m.range.min}-${m.range.max}): ${m.description || ''}`)
.join('\n')
// Create a response format structure
const responseProperties: Record<string, any> = {}
metrics.forEach((m: any) => {
// Ensure metric and name are valid before using them
if (m?.name) {
responseProperties[m.name.toLowerCase()] = { type: 'number' } // Use lowercase for consistency
responseProperties[m.name.toLowerCase()] = { type: 'number' }
} else {
logger.warn('Skipping invalid metric entry during response format generation:', m)
}
@@ -77,7 +79,6 @@ export class EvaluatorBlockHandler implements BlockHandler {
},
}
// Ensure we have a system prompt
if (!systemPromptObj.systemPrompt) {
systemPromptObj.systemPrompt =
'Evaluate the content and provide scores for each metric as JSON.'
@@ -155,7 +156,10 @@ export class EvaluatorBlockHandler implements BlockHandler {
if (typeof content === 'string') {
if (isJSONString(content)) {
const parsed = parseJSON(content, null)
return parsed ? stringifyJSON(parsed) : content
if (parsed) {
return stringifyJSON(parsed)
}
return content
}
return content
}
@@ -196,7 +200,12 @@ export class EvaluatorBlockHandler implements BlockHandler {
metrics: any
): Record<string, number> {
const metricScores: Record<string, number> = {}
const validMetrics = Array.isArray(metrics) ? metrics : []
let validMetrics: any[]
if (Array.isArray(metrics)) {
validMetrics = metrics
} else {
validMetrics = []
}
if (Object.keys(parsedContent).length === 0) {
validMetrics.forEach((metric: any) => {

View File

@@ -46,8 +46,7 @@ describe('FunctionBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: new Set(),

View File

@@ -38,8 +38,7 @@ describe('GenericBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: new Set(),

View File

@@ -17,8 +17,6 @@ export class GenericBlockHandler implements BlockHandler {
block: SerializedBlock,
inputs: Record<string, any>
): Promise<any> {
logger.info(`Executing block: ${block.id} (Type: ${block.metadata?.id})`)
const isMcpTool = block.config.tool?.startsWith('mcp-')
let tool = null
@@ -38,10 +36,6 @@ export class GenericBlockHandler implements BlockHandler {
try {
const transformedParams = blockConfig.tools.config.params(inputs)
finalInputs = { ...inputs, ...transformedParams }
logger.info(`Applied parameter transformation for block type: ${blockType}`, {
original: inputs,
transformed: transformedParams,
})
} catch (error) {
logger.warn(`Failed to apply parameter transformation for block type ${blockType}:`, {
error: error instanceof Error ? error.message : String(error),
@@ -50,14 +44,6 @@ export class GenericBlockHandler implements BlockHandler {
}
}
logger.info(`[GenericBlockHandler] Calling executeTool for ${block.config.tool}`, {
blockId: block.id,
blockName: block.metadata?.name,
originalInputs: inputs,
finalInputs: finalInputs,
tool: block.config.tool,
})
try {
const result = await executeTool(
block.config.tool,

View File

@@ -4,6 +4,7 @@ import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-h
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
import { PauseResumeBlockHandler } from '@/executor/handlers/pause-resume/pause-resume-handler'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
@@ -19,6 +20,7 @@ export {
FunctionBlockHandler,
GenericBlockHandler,
ResponseBlockHandler,
PauseResumeBlockHandler,
RouterBlockHandler,
TriggerBlockHandler,
VariablesBlockHandler,

View File

@@ -0,0 +1,668 @@
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import type { BlockOutput } from '@/blocks/types'
import {
BlockType,
buildResumeApiUrl,
buildResumeUiUrl,
type FieldType,
HTTP,
PAUSE_RESUME,
} from '@/executor/consts'
import {
generatePauseContextId,
mapNodeMetadataToPauseScopes,
} from '@/executor/pause-resume/utils.ts'
import type { BlockHandler, ExecutionContext, PauseMetadata } from '@/executor/types'
import { collectBlockData } from '@/executor/utils/block-data'
import type { SerializedBlock } from '@/serializer/types'
import { normalizeBlockName } from '@/stores/workflows/utils'
import { executeTool } from '@/tools'
// Module-scoped logger tagged with this handler's name for log filtering.
const logger = createLogger('PauseResumeBlockHandler')
/**
 * One property row produced by the structured JSON builder UI.
 * `value` may be a primitive, a nested JSONProperty[] (for object/array
 * types), or a `<variable.name>` reference string resolved later.
 */
interface JSONProperty {
  id: string
  name: string
  type: FieldType
  value: any
  // UI-only state: whether the row is collapsed in the builder — not used by execution here.
  collapsed?: boolean
}
/**
 * Flattened description of one field in the pause response structure.
 * `name` is a dot-separated path for nested object properties
 * (see normalizeResponseStructure).
 */
interface ResponseStructureEntry {
  name: string
  type: string
  value: any
}
/**
 * A sanitized human-input field definition shown on the resume UI.
 * Produced by normalizeInputFormat from loosely-typed block inputs:
 * all string fields are trimmed and defaults applied.
 */
interface NormalizedInputField {
  id: string
  name: string
  label: string
  type: string
  description?: string
  placeholder?: string
  // Pre-filled value, if the builder supplied one.
  value?: any
  required?: boolean
  options?: any[]
}
/**
 * Outcome of one notification tool invocation fired when the block pauses.
 * `durationMs` is omitted when the tool config was invalid (no toolId) or
 * the call threw before timing could complete.
 */
interface NotificationToolResult {
  toolId: string
  title?: string
  operation?: string
  success: boolean
  durationMs?: number
}
/**
 * Handler for the human-in-the-loop approval block.
 *
 * Executing this block does not complete it: the output carries a
 * `_pauseMetadata` payload (context id, response snapshot, loop/parallel
 * scopes, resume links) that downstream execution machinery uses to pause
 * the workflow until a human or API call resumes it.
 */
export class PauseResumeBlockHandler implements BlockHandler {
  /** Handles only approval blocks. */
  canHandle(block: SerializedBlock): boolean {
    return block.metadata?.id === BlockType.APPROVAL
  }

  /**
   * Entry point for plain (non-loop, non-parallel) execution; delegates to
   * executeWithNode with only the node id set.
   */
  async execute(
    ctx: ExecutionContext,
    block: SerializedBlock,
    inputs: Record<string, any>
  ): Promise<BlockOutput> {
    return this.executeWithNode(ctx, block, inputs, {
      nodeId: block.id,
    })
  }

  /**
   * Builds the pause output for a specific node instance.
   *
   * @param nodeMetadata - Identifies the concrete node instance, including
   *   loop/parallel scope info when the block runs inside a subflow.
   * @returns Block output containing the response snapshot, `_pauseMetadata`,
   *   resume URLs, and (for human operation) one top-level key per input
   *   field. On failure, returns a 500-style response object instead of
   *   throwing.
   */
  async executeWithNode(
    ctx: ExecutionContext,
    block: SerializedBlock,
    inputs: Record<string, any>,
    nodeMetadata: {
      nodeId: string
      loopId?: string
      parallelId?: string
      branchIndex?: number
      branchTotal?: number
    }
  ): Promise<BlockOutput> {
    try {
      // Default to the human-approval operation when none is specified.
      const operation = inputs.operation ?? PAUSE_RESUME.OPERATION.HUMAN
      const { parallelScope, loopScope } = mapNodeMetadataToPauseScopes(ctx, nodeMetadata)
      // Context id must be unique per node instance (loop iteration / branch).
      const contextId = generatePauseContextId(block.id, nodeMetadata, loopScope)
      const timestamp = new Date().toISOString()
      const executionId = ctx.executionId ?? ctx.metadata?.executionId
      const workflowId = ctx.workflowId

      // Type is a forward reference to the `pauseMetadata` const declared
      // below — legal in TypeScript type positions.
      let resumeLinks: typeof pauseMetadata.resumeLinks | undefined
      if (executionId && workflowId) {
        try {
          const baseUrl = getBaseUrl()
          resumeLinks = {
            apiUrl: buildResumeApiUrl(baseUrl, workflowId, executionId, contextId),
            uiUrl: buildResumeUiUrl(baseUrl, workflowId, executionId),
            contextId,
            executionId,
            workflowId,
          }
        } catch (error) {
          // Fall back to relative URLs when the base URL cannot be resolved.
          logger.warn('Failed to get base URL, using relative paths', { error })
          resumeLinks = {
            apiUrl: buildResumeApiUrl(undefined, workflowId, executionId, contextId),
            uiUrl: buildResumeUiUrl(undefined, workflowId, executionId),
            contextId,
            executionId,
            workflowId,
          }
        }
      }

      const normalizedInputFormat = this.normalizeInputFormat(inputs.inputFormat)
      const responseStructure = this.normalizeResponseStructure(inputs.builderData)

      let responseData: any
      let statusCode: number
      let responseHeaders: Record<string, string>
      if (operation === PAUSE_RESUME.OPERATION.API) {
        // API operation: the block author controls the response payload,
        // status, and headers.
        const parsed = this.parseResponseData(inputs)
        if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
          responseData = {
            ...parsed,
            operation,
            // Prefer an explicit responseStructure from the payload; otherwise
            // use the one derived from the builder data.
            responseStructure:
              parsed.responseStructure && Array.isArray(parsed.responseStructure)
                ? parsed.responseStructure
                : responseStructure,
          }
        } else {
          // Arrays and primitives are passed through untouched.
          responseData = parsed
        }
        statusCode = this.parseStatus(inputs.status)
        responseHeaders = this.parseHeaders(inputs.headers)
      } else {
        // Human operation: canonical pause payload; `submission` is filled in
        // on resume.
        responseData = {
          operation,
          responseStructure,
          inputFormat: normalizedInputFormat,
          submission: null,
        }
        statusCode = HTTP.STATUS.OK
        responseHeaders = { 'Content-Type': HTTP.CONTENT_TYPE.JSON }
      }

      // Fire notification tools (e.g. to alert the approver) only for the
      // human operation, before the pause metadata is assembled.
      let notificationResults: NotificationToolResult[] | undefined
      if (
        operation === PAUSE_RESUME.OPERATION.HUMAN &&
        inputs.notification &&
        Array.isArray(inputs.notification)
      ) {
        notificationResults = await this.executeNotificationTools(ctx, block, inputs.notification, {
          resumeLinks,
          executionId,
          workflowId,
          inputFormat: normalizedInputFormat,
          responseStructure,
          operation,
        })
      }

      // Embed resume links into plain-object payloads under `_resume` so API
      // consumers can discover how to resume; arrays/primitives are left alone.
      const responseDataWithResume =
        resumeLinks &&
        responseData &&
        typeof responseData === 'object' &&
        !Array.isArray(responseData)
          ? { ...responseData, _resume: resumeLinks }
          : responseData

      const pauseMetadata: PauseMetadata = {
        contextId,
        blockId: nodeMetadata.nodeId,
        response: {
          data: responseDataWithResume,
          status: statusCode,
          headers: responseHeaders,
        },
        timestamp,
        parallelScope,
        loopScope,
        resumeLinks,
      }

      const responseOutput: Record<string, any> = {
        data: responseDataWithResume,
        status: statusCode,
        headers: responseHeaders,
        operation,
      }
      if (operation === PAUSE_RESUME.OPERATION.HUMAN) {
        responseOutput.responseStructure = responseStructure
        responseOutput.inputFormat = normalizedInputFormat
        responseOutput.submission = null
      }
      if (resumeLinks) {
        responseOutput.resume = resumeLinks
      }

      // Expose each declared input field as a top-level output key so other
      // blocks can reference them directly; null until a value is submitted.
      const structuredFields: Record<string, any> = {}
      if (operation === PAUSE_RESUME.OPERATION.HUMAN) {
        for (const field of normalizedInputFormat) {
          if (field.name) {
            structuredFields[field.name] = field.value !== undefined ? field.value : null
          }
        }
      }

      const output: Record<string, any> = {
        ...structuredFields,
        response: responseOutput,
        _pauseMetadata: pauseMetadata,
      }
      if (notificationResults && notificationResults.length > 0) {
        output.notificationResults = notificationResults
      }
      if (resumeLinks) {
        output.uiUrl = resumeLinks.uiUrl
        output.apiUrl = resumeLinks.apiUrl
      }
      return output
    } catch (error: any) {
      // Never throw from the handler: surface the failure as a 500 response
      // output (and note there is no _pauseMetadata, so the workflow will not
      // pause on this path).
      logger.error('Pause resume block execution failed:', error)
      return {
        response: {
          data: {
            error: 'Pause resume block execution failed',
            message: error.message || 'Unknown error',
          },
          status: HTTP.STATUS.SERVER_ERROR,
          headers: { 'Content-Type': HTTP.CONTENT_TYPE.JSON },
        },
      }
    }
  }

  /**
   * Resolves the API-operation payload from either raw JSON (`data`) or the
   * structured builder (`builderData`), depending on `dataMode`
   * (default 'structured'). Unparseable JSON strings fall back to the raw
   * string.
   */
  private parseResponseData(inputs: Record<string, any>): any {
    const dataMode = inputs.dataMode || 'structured'
    if (dataMode === 'json' && inputs.data) {
      if (typeof inputs.data === 'string') {
        try {
          return JSON.parse(inputs.data)
        } catch (error) {
          logger.warn('Failed to parse JSON data, returning as string:', error)
          return inputs.data
        }
      } else if (typeof inputs.data === 'object' && inputs.data !== null) {
        return inputs.data
      }
      return inputs.data
    }
    if (dataMode === 'structured' && inputs.builderData) {
      const convertedData = this.convertBuilderDataToJson(inputs.builderData)
      return this.parseObjectStrings(convertedData)
    }
    // Backward-compatible fallback when neither mode matched.
    return inputs.data || {}
  }

  /**
   * Flattens builder properties into dot-path entries, recursing into nested
   * object properties. Rows with empty names are skipped; an 'object' row
   * whose children all have empty names is emitted as a single entry instead.
   */
  private normalizeResponseStructure(
    builderData?: JSONProperty[],
    prefix = ''
  ): ResponseStructureEntry[] {
    if (!Array.isArray(builderData)) {
      return []
    }
    const entries: ResponseStructureEntry[] = []
    for (const prop of builderData) {
      const fieldName = typeof prop.name === 'string' ? prop.name.trim() : ''
      if (!fieldName) continue
      const path = prefix ? `${prefix}.${fieldName}` : fieldName
      if (prop.type === 'object' && Array.isArray(prop.value)) {
        const nested = this.normalizeResponseStructure(prop.value, path)
        if (nested.length > 0) {
          entries.push(...nested)
          continue
        }
      }
      const value = this.convertPropertyValue(prop)
      entries.push({
        name: path,
        type: prop.type,
        value,
      })
    }
    return entries
  }

  /**
   * Sanitizes raw input-format rows: drops rows without a name, trims string
   * fields, defaults `label` to the name, `type` to 'string', and `id` to a
   * positional fallback.
   */
  private normalizeInputFormat(inputFormat: any): NormalizedInputField[] {
    if (!Array.isArray(inputFormat)) {
      return []
    }
    return inputFormat
      .map((field: any, index: number) => {
        const name = typeof field?.name === 'string' ? field.name.trim() : ''
        if (!name) return null
        const id =
          typeof field?.id === 'string' && field.id.length > 0 ? field.id : `field_${index}`
        const label =
          typeof field?.label === 'string' && field.label.trim().length > 0
            ? field.label.trim()
            : name
        const type =
          typeof field?.type === 'string' && field.type.trim().length > 0 ? field.type : 'string'
        const description =
          typeof field?.description === 'string' && field.description.trim().length > 0
            ? field.description.trim()
            : undefined
        const placeholder =
          typeof field?.placeholder === 'string' && field.placeholder.trim().length > 0
            ? field.placeholder.trim()
            : undefined
        const required = field?.required === true
        const options = Array.isArray(field?.options) ? field.options : undefined
        return {
          id,
          name,
          label,
          type,
          description,
          placeholder,
          value: field?.value,
          required,
          options,
        } as NormalizedInputField
      })
      .filter((field): field is NormalizedInputField => field !== null)
  }

  /**
   * Converts builder rows into a plain object, applying per-type value
   * conversion. Rows with blank names are dropped.
   */
  private convertBuilderDataToJson(builderData: JSONProperty[]): any {
    if (!Array.isArray(builderData)) {
      return {}
    }
    const result: any = {}
    for (const prop of builderData) {
      if (!prop.name || !prop.name.trim()) {
        continue
      }
      const value = this.convertPropertyValue(prop)
      result[prop.name] = value
    }
    return result
  }

  /**
   * UI helper: serializes builder rows to a JSON string for the code editor,
   * keeping `<variable.name>` references unquoted so they read as references
   * rather than string literals. Values are NOT type-converted here.
   */
  static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
    if (!Array.isArray(builderData) || builderData.length === 0) {
      return '{\n  \n}'
    }
    const result: any = {}
    for (const prop of builderData) {
      if (!prop.name || !prop.name.trim()) {
        continue
      }
      result[prop.name] = prop.value
    }
    let jsonString = JSON.stringify(result, null, 2)
    // Strip the quotes JSON.stringify added around "<...>" references.
    jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
    return jsonString
  }

  /** Dispatches value conversion based on the row's declared type. */
  private convertPropertyValue(prop: JSONProperty): any {
    switch (prop.type) {
      case 'object':
        return this.convertObjectValue(prop.value)
      case 'array':
        return this.convertArrayValue(prop.value)
      case 'number':
        return this.convertNumberValue(prop.value)
      case 'boolean':
        return this.convertBooleanValue(prop.value)
      case 'files':
        // File values are passed through as-is.
        return prop.value
      default:
        return prop.value
    }
  }

  /**
   * 'object' rows: nested builder rows recurse; JSON strings are parsed;
   * variable references stay untouched for later resolution.
   */
  private convertObjectValue(value: any): any {
    if (Array.isArray(value)) {
      return this.convertBuilderDataToJson(value)
    }
    if (typeof value === 'string' && !this.isVariableReference(value)) {
      return this.tryParseJson(value, value)
    }
    return value
  }

  /**
   * 'array' rows: element lists are converted item-by-item; strings are only
   * accepted as parsed JSON when the result is actually an array.
   */
  private convertArrayValue(value: any): any {
    if (Array.isArray(value)) {
      return value.map((item: any) => this.convertArrayItem(item))
    }
    if (typeof value === 'string' && !this.isVariableReference(value)) {
      const parsed = this.tryParseJson(value, value)
      return Array.isArray(parsed) ? parsed : value
    }
    return value
  }

  /**
   * Converts one array element: typed builder items are unwrapped (objects
   * recurse, nested arrays unwrap one level of typed sub-items), untyped
   * items pass through.
   */
  private convertArrayItem(item: any): any {
    if (typeof item !== 'object' || !item.type) {
      return item
    }
    if (item.type === 'object' && Array.isArray(item.value)) {
      return this.convertBuilderDataToJson(item.value)
    }
    if (item.type === 'array' && Array.isArray(item.value)) {
      return item.value.map((subItem: any) =>
        typeof subItem === 'object' && subItem.type ? subItem.value : subItem
      )
    }
    return item.value
  }

  /** 'number' rows: numeric coercion; non-numeric strings are kept as-is. */
  private convertNumberValue(value: any): any {
    if (this.isVariableReference(value)) {
      return value
    }
    const numValue = Number(value)
    return Number.isNaN(numValue) ? value : numValue
  }

  /** 'boolean' rows: only the string 'true' or boolean true become true. */
  private convertBooleanValue(value: any): any {
    if (this.isVariableReference(value)) {
      return value
    }
    return value === 'true' || value === true
  }

  /** JSON.parse with a fallback instead of a throw. */
  private tryParseJson(jsonString: string, fallback: any): any {
    try {
      return JSON.parse(jsonString)
    } catch {
      return fallback
    }
  }

  /**
   * Heuristic: a string starting with '<' and containing '>' is treated as a
   * `<variable.name>` reference and must not be parsed or coerced.
   */
  private isVariableReference(value: any): boolean {
    return typeof value === 'string' && value.trim().startsWith('<') && value.trim().includes('>')
  }

  /**
   * Recursively parses stringified JSON objects nested anywhere in the data.
   * Strings that parse to primitives are replaced by the parsed primitive;
   * unparseable strings are kept.
   */
  private parseObjectStrings(data: any): any {
    if (typeof data === 'string') {
      try {
        const parsed = JSON.parse(data)
        if (typeof parsed === 'object' && parsed !== null) {
          return this.parseObjectStrings(parsed)
        }
        return parsed
      } catch {
        return data
      }
    } else if (Array.isArray(data)) {
      return data.map((item) => this.parseObjectStrings(item))
    } else if (typeof data === 'object' && data !== null) {
      const result: any = {}
      for (const [key, value] of Object.entries(data)) {
        result[key] = this.parseObjectStrings(value)
      }
      return result
    }
    return data
  }

  /**
   * Parses a status-code string; anything non-numeric or outside the valid
   * HTTP range 100-599 falls back to 200.
   */
  private parseStatus(status?: string): number {
    if (!status) return HTTP.STATUS.OK
    const parsed = Number(status)
    if (Number.isNaN(parsed) || parsed < 100 || parsed > 599) {
      return HTTP.STATUS.OK
    }
    return parsed
  }

  /**
   * Converts table-style header rows into a header map, always including a
   * JSON Content-Type default (which a user-supplied Content-Type overrides).
   * NOTE(review): the parameter type is non-optional but the body guards
   * against a missing value — callers apparently may pass undefined.
   */
  private parseHeaders(
    headers: {
      id: string
      cells: { Key: string; Value: string }
    }[]
  ): Record<string, string> {
    const defaultHeaders = { 'Content-Type': HTTP.CONTENT_TYPE.JSON }
    if (!headers) return defaultHeaders
    const headerObj = headers.reduce((acc: Record<string, string>, header) => {
      if (header?.cells?.Key && header?.cells?.Value) {
        acc[header.cells.Key] = header.cells.Value
      }
      return acc
    }, {})
    return { ...defaultHeaders, ...headerObj }
  }

  /**
   * Runs all configured notification tools in parallel when the block pauses.
   *
   * The executed blocks' data is snapshotted and augmented with this pause
   * block's own output (resume URLs plus declared input fields) so
   * notification templates can reference the pause block by name. Failures
   * are captured per-tool; this method never rejects.
   *
   * @returns One result entry per configured tool, in input order.
   */
  private async executeNotificationTools(
    ctx: ExecutionContext,
    block: SerializedBlock,
    tools: any[],
    context: {
      resumeLinks?: {
        apiUrl: string
        uiUrl: string
        contextId: string
        executionId: string
        workflowId: string
      }
      executionId?: string
      workflowId?: string
      inputFormat?: NormalizedInputField[]
      responseStructure?: ResponseStructureEntry[]
      operation?: string
    }
  ): Promise<NotificationToolResult[]> {
    if (!tools || tools.length === 0) {
      return []
    }

    // Snapshot already-executed block data, then overlay this pause block's
    // output so templates can reference it before it formally completes.
    const { blockData: collectedBlockData, blockNameMapping: collectedBlockNameMapping } =
      collectBlockData(ctx)
    const blockDataWithPause: Record<string, any> = { ...collectedBlockData }
    const blockNameMappingWithPause: Record<string, string> = { ...collectedBlockNameMapping }
    const pauseBlockId = block.id
    const pauseBlockName = block.metadata?.name
    const pauseOutput: Record<string, any> = {
      ...(blockDataWithPause[pauseBlockId] || {}),
    }
    if (context.resumeLinks) {
      if (context.resumeLinks.uiUrl) {
        pauseOutput.uiUrl = context.resumeLinks.uiUrl
      }
      if (context.resumeLinks.apiUrl) {
        pauseOutput.apiUrl = context.resumeLinks.apiUrl
      }
    }
    if (Array.isArray(context.inputFormat)) {
      for (const field of context.inputFormat) {
        if (field?.name) {
          const fieldName = field.name.trim()
          // Do not clobber values already present in the collected output.
          if (fieldName.length > 0 && !(fieldName in pauseOutput)) {
            pauseOutput[fieldName] = field.value !== undefined ? field.value : null
          }
        }
      }
    }
    blockDataWithPause[pauseBlockId] = pauseOutput
    if (pauseBlockName) {
      // Map both the raw and normalized block names to the pause block id.
      blockNameMappingWithPause[pauseBlockName] = pauseBlockId
      blockNameMappingWithPause[normalizeBlockName(pauseBlockName)] = pauseBlockId
    }

    const notificationPromises = tools.map<Promise<NotificationToolResult>>(async (toolConfig) => {
      const startTime = Date.now()
      try {
        const toolId = toolConfig.toolId
        if (!toolId) {
          logger.warn('Notification tool missing toolId', { toolConfig })
          return {
            toolId: 'unknown',
            title: toolConfig.title,
            operation: toolConfig.operation,
            success: false,
          }
        }
        const toolParams = {
          ...toolConfig.params,
          // Pause-specific context the tool can interpolate (resume URLs etc.).
          _pauseContext: {
            resumeApiUrl: context.resumeLinks?.apiUrl,
            resumeUiUrl: context.resumeLinks?.uiUrl,
            executionId: context.executionId,
            workflowId: context.workflowId,
            contextId: context.resumeLinks?.contextId,
            inputFormat: context.inputFormat,
            responseStructure: context.responseStructure,
            operation: context.operation,
          },
          _context: {
            workflowId: ctx.workflowId,
            workspaceId: ctx.workspaceId,
          },
          blockData: blockDataWithPause,
          blockNameMapping: blockNameMappingWithPause,
        }
        const result = await executeTool(toolId, toolParams, false, false, ctx)
        const durationMs = Date.now() - startTime
        if (!result.success) {
          logger.warn('Notification tool execution failed', {
            toolId,
            error: result.error,
          })
          return {
            toolId,
            title: toolConfig.title,
            operation: toolConfig.operation,
            success: false,
            durationMs,
          }
        }
        return {
          toolId,
          title: toolConfig.title,
          operation: toolConfig.operation,
          success: true,
          durationMs,
        }
      } catch (error) {
        logger.error('Error executing notification tool', { error, toolConfig })
        return {
          toolId: toolConfig.toolId || 'unknown',
          title: toolConfig.title,
          operation: toolConfig.operation,
          success: false,
        }
      }
    })
    return Promise.all(notificationPromises)
  }
}

View File

@@ -5,19 +5,20 @@
* Creates handlers for real user blocks (not infrastructure like sentinels).
*/
import { AgentBlockHandler } from '@/executor/handlers/agent/agent-handler'
import { ApiBlockHandler } from '@/executor/handlers/api/api-handler'
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
import { PauseResumeBlockHandler } from '@/executor/handlers/pause-resume/pause-resume-handler'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
import { VariablesBlockHandler } from '@/executor/handlers/variables/variables-handler'
import { WaitBlockHandler } from '@/executor/handlers/wait/wait-handler'
import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler'
import type { BlockHandler } from '@/executor/types'
import { AgentBlockHandler } from './agent/agent-handler'
import { ApiBlockHandler } from './api/api-handler'
import { ConditionBlockHandler } from './condition/condition-handler'
import { EvaluatorBlockHandler } from './evaluator/evaluator-handler'
import { FunctionBlockHandler } from './function/function-handler'
import { GenericBlockHandler } from './generic/generic-handler'
import { ResponseBlockHandler } from './response/response-handler'
import { RouterBlockHandler } from './router/router-handler'
import { TriggerBlockHandler } from './trigger/trigger-handler'
import { VariablesBlockHandler } from './variables/variables-handler'
import { WaitBlockHandler } from './wait/wait-handler'
import { WorkflowBlockHandler } from './workflow/workflow-handler'
/**
* Create all block handlers
@@ -27,20 +28,19 @@ import { WorkflowBlockHandler } from './workflow/workflow-handler'
*/
/**
 * Builds the full ordered list of block handlers.
 *
 * Order is significant: handlers are consulted in sequence via canHandle,
 * so the more specific handlers come first and the generic handler is kept
 * at the end as the catch-all fallback.
 */
export function createBlockHandlers(): BlockHandler[] {
  const handlers: BlockHandler[] = [
    new TriggerBlockHandler(),
    new FunctionBlockHandler(),
    new ApiBlockHandler(),
    new ConditionBlockHandler(),
    new RouterBlockHandler(),
    new ResponseBlockHandler(),
    new PauseResumeBlockHandler(),
    new AgentBlockHandler(),
    new VariablesBlockHandler(),
    new WorkflowBlockHandler(),
    new WaitBlockHandler(),
    new EvaluatorBlockHandler(),
    new GenericBlockHandler(),
  ]
  return handlers
}

View File

@@ -63,7 +63,6 @@ export class ResponseBlockHandler implements BlockHandler {
const dataMode = inputs.dataMode || 'structured'
if (dataMode === 'json' && inputs.data) {
// Handle JSON mode - data comes from code editor
if (typeof inputs.data === 'string') {
try {
return JSON.parse(inputs.data)
@@ -72,19 +71,16 @@ export class ResponseBlockHandler implements BlockHandler {
return inputs.data
}
} else if (typeof inputs.data === 'object' && inputs.data !== null) {
// Data is already an object, return as-is
return inputs.data
}
return inputs.data
}
if (dataMode === 'structured' && inputs.builderData) {
// Handle structured mode - convert builderData to JSON
const convertedData = this.convertBuilderDataToJson(inputs.builderData)
return this.parseObjectStrings(convertedData)
}
// Fallback to inputs.data for backward compatibility
return inputs.data || {}
}
@@ -107,7 +103,6 @@ export class ResponseBlockHandler implements BlockHandler {
return result
}
// Static method for UI conversion from Builder to Editor mode
static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
if (!Array.isArray(builderData) || builderData.length === 0) {
return '{\n \n}'
@@ -120,15 +115,11 @@ export class ResponseBlockHandler implements BlockHandler {
continue
}
// For UI display, keep variable references as-is without processing
result[prop.name] = prop.value
}
// Convert to JSON string, then replace quoted variable references with unquoted ones
let jsonString = JSON.stringify(result, null, 2)
// Replace quoted variable references with unquoted ones
// Pattern: "<variable.name>" -> <variable.name>
jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
return jsonString
@@ -145,7 +136,6 @@ export class ResponseBlockHandler implements BlockHandler {
case 'boolean':
return this.convertBooleanValue(prop.value)
case 'files':
// File values should be passed through as-is (UserFile objects)
return prop.value
default:
return prop.value
@@ -161,7 +151,6 @@ export class ResponseBlockHandler implements BlockHandler {
return this.tryParseJson(value, value)
}
// Keep variable references or other values as-is (they'll be resolved later)
return value
}
@@ -172,10 +161,12 @@ export class ResponseBlockHandler implements BlockHandler {
if (typeof value === 'string' && !this.isVariableReference(value)) {
const parsed = this.tryParseJson(value, value)
return Array.isArray(parsed) ? parsed : value
if (Array.isArray(parsed)) {
return parsed
}
return value
}
// Keep variable references or other values as-is
return value
}
@@ -189,9 +180,12 @@ export class ResponseBlockHandler implements BlockHandler {
}
if (item.type === 'array' && Array.isArray(item.value)) {
return item.value.map((subItem: any) =>
typeof subItem === 'object' && subItem.type ? subItem.value : subItem
)
return item.value.map((subItem: any) => {
if (typeof subItem === 'object' && subItem.type) {
return subItem.value
}
return subItem
})
}
return item.value
@@ -203,7 +197,10 @@ export class ResponseBlockHandler implements BlockHandler {
}
const numValue = Number(value)
return Number.isNaN(numValue) ? value : numValue
if (Number.isNaN(numValue)) {
return value
}
return numValue
}
private convertBooleanValue(value: any): any {
@@ -228,15 +225,14 @@ export class ResponseBlockHandler implements BlockHandler {
private parseObjectStrings(data: any): any {
if (typeof data === 'string') {
// Try to parse strings that might be JSON objects
try {
const parsed = JSON.parse(data)
if (typeof parsed === 'object' && parsed !== null) {
return this.parseObjectStrings(parsed) // Recursively parse nested objects
return this.parseObjectStrings(parsed)
}
return parsed
} catch {
return data // Return as string if not valid JSON
return data
}
} else if (Array.isArray(data)) {
return data.map((item) => this.parseObjectStrings(item))

View File

@@ -65,8 +65,7 @@ describe('RouterBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
completedLoops: new Set(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),

View File

@@ -58,16 +58,13 @@ export class RouterBlockHandler implements BlockHandler {
})
if (!response.ok) {
// Try to extract a helpful error message
let errorMessage = `Provider API request failed with status ${response.status}`
try {
const errorData = await response.json()
if (errorData.error) {
errorMessage = errorData.error
}
} catch (_e) {
// If JSON parsing fails, use the original error message
}
} catch (_e) {}
throw new Error(errorMessage)
}
@@ -90,7 +87,6 @@ export class RouterBlockHandler implements BlockHandler {
total: DEFAULTS.TOKENS.TOTAL,
}
// Calculate cost based on token usage, similar to how providers do it
const cost = calculateCost(
result.model,
tokens.prompt || DEFAULTS.TOKENS.PROMPT,
@@ -116,7 +112,7 @@ export class RouterBlockHandler implements BlockHandler {
blockType: chosenBlock.type || DEFAULTS.BLOCK_TYPE,
blockTitle: chosenBlock.title || DEFAULTS.BLOCK_TITLE,
},
selectedRoute: String(chosenBlock.id), // Used by ExecutionEngine to activate the correct edge
selectedRoute: String(chosenBlock.id),
} as BlockOutput
} catch (error) {
logger.error('Router execution failed:', error)

View File

@@ -19,8 +19,7 @@ describe('TriggerBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: new Set(),

View File

@@ -43,18 +43,10 @@ export class TriggerBlockHandler implements BlockHandler {
if (starterState?.output && Object.keys(starterState.output).length > 0) {
const starterOutput = starterState.output
// Generic handling for webhook triggers - extract provider-specific data
// Check if this is a webhook execution
if (starterOutput.webhook?.data) {
const webhookData = starterOutput.webhook?.data || {}
const provider = webhookData.provider
logger.debug(`Processing webhook trigger for block ${block.id}`, {
provider,
blockType: block.metadata?.id,
})
if (provider === 'github') {
const payloadSource = webhookData.payload || {}
return {
@@ -136,21 +128,14 @@ export class TriggerBlockHandler implements BlockHandler {
return result
}
logger.debug(`Returning starter block output for trigger block ${block.id}`, {
starterOutputKeys: Object.keys(starterOutput),
})
return starterOutput
}
}
if (inputs && Object.keys(inputs).length > 0) {
logger.debug(`Returning trigger inputs for block ${block.id}`, {
inputKeys: Object.keys(inputs),
})
return inputs
}
logger.debug(`No inputs provided for trigger block ${block.id}, returning empty object`)
return {}
}
@@ -165,10 +150,6 @@ export class TriggerBlockHandler implements BlockHandler {
const existingState = ctx.blockStates.get(block.id)
if (existingState?.output && Object.keys(existingState.output).length > 0) {
logger.debug('Returning pre-initialized starter block output', {
blockId: block.id,
outputKeys: Object.keys(existingState.output),
})
return existingState.output
}

View File

@@ -9,11 +9,6 @@ const logger = createLogger('VariablesBlockHandler')
export class VariablesBlockHandler implements BlockHandler {
canHandle(block: SerializedBlock): boolean {
const canHandle = block.metadata?.id === BlockType.VARIABLES
logger.info(`VariablesBlockHandler.canHandle: ${canHandle}`, {
blockId: block.id,
metadataId: block.metadata?.id,
expectedType: BlockType.VARIABLES,
})
return canHandle
}
@@ -22,12 +17,6 @@ export class VariablesBlockHandler implements BlockHandler {
block: SerializedBlock,
inputs: Record<string, any>
): Promise<BlockOutput> {
logger.info(`Executing variables block: ${block.id}`, {
blockName: block.metadata?.name,
inputsKeys: Object.keys(inputs),
variablesInput: inputs.variables,
})
try {
if (!ctx.workflowVariables) {
ctx.workflowVariables = {}
@@ -53,16 +42,6 @@ export class VariablesBlockHandler implements BlockHandler {
}
}
logger.info('Variables updated', {
updatedVariables: assignments.map((a) => a.variableName),
allVariables: Object.values(ctx.workflowVariables).map((v: any) => v.name),
updatedValues: Object.entries(ctx.workflowVariables).map(([id, v]: [string, any]) => ({
id,
name: v.name,
value: v.value,
})),
})
const output: Record<string, any> = {}
for (const assignment of assignments) {
output[assignment.variableName] = assignment.value

View File

@@ -13,29 +13,25 @@ const logger = createLogger('WaitBlockHandler')
const sleep = async (ms: number, checkCancelled?: () => boolean): Promise<boolean> => {
const isClientSide = typeof window !== 'undefined'
// Server-side: simple sleep without polling
if (!isClientSide) {
await new Promise((resolve) => setTimeout(resolve, ms))
return true
}
// Client-side: check for cancellation every 100ms
const chunkMs = 100
let elapsed = 0
while (elapsed < ms) {
// Check if execution was cancelled
if (checkCancelled?.()) {
return false // Sleep was interrupted
return false
}
// Sleep for a chunk or remaining time, whichever is smaller
const sleepTime = Math.min(chunkMs, ms - elapsed)
await new Promise((resolve) => setTimeout(resolve, sleepTime))
elapsed += sleepTime
}
return true // Sleep completed normally
return true
}
/**
@@ -51,34 +47,24 @@ export class WaitBlockHandler implements BlockHandler {
block: SerializedBlock,
inputs: Record<string, any>
): Promise<any> {
logger.info(`Executing Wait block: ${block.id}`, { inputs })
// Parse the wait duration
const timeValue = Number.parseInt(inputs.timeValue || '10', 10)
const timeUnit = inputs.timeUnit || 'seconds'
// Validate time value
if (Number.isNaN(timeValue) || timeValue <= 0) {
throw new Error('Wait amount must be a positive number')
}
// Calculate wait time in milliseconds
let waitMs = timeValue * 1000 // Default to seconds
let waitMs = timeValue * 1000
if (timeUnit === 'minutes') {
waitMs = timeValue * 60 * 1000
}
// Enforce 10-minute maximum (600,000 ms)
const maxWaitMs = 10 * 60 * 1000
if (waitMs > maxWaitMs) {
const maxDisplay = timeUnit === 'minutes' ? '10 minutes' : '600 seconds'
throw new Error(`Wait time exceeds maximum of ${maxDisplay}`)
}
logger.info(`Waiting for ${waitMs}ms (${timeValue} ${timeUnit})`)
// Actually sleep for the specified duration
// The executor updates context.isCancelled when cancel() is called
const checkCancelled = () => {
return (ctx as any).isCancelled === true
}
@@ -86,14 +72,12 @@ export class WaitBlockHandler implements BlockHandler {
const completed = await sleep(waitMs, checkCancelled)
if (!completed) {
logger.info('Wait was interrupted by cancellation')
return {
waitDuration: waitMs,
status: 'cancelled',
}
}
logger.info('Wait completed successfully')
return {
waitDuration: waitMs,
status: 'completed',

View File

@@ -44,8 +44,7 @@ describe('WorkflowBlockHandler', () => {
metadata: { duration: 0 },
environmentVariables: {},
decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(),
loopItems: new Map(),
loopExecutions: new Map(),
executedBlocks: new Set(),
activeExecutionPath: new Set(),
completedLoops: new Set(),

View File

@@ -72,7 +72,6 @@ export class WorkflowBlockHandler implements BlockHandler {
throw new Error(`Child workflow ${workflowId} not found`)
}
// Get workflow metadata for logging
const { workflows } = useWorkflowRegistry.getState()
const workflowMetadata = workflows[workflowId]
const childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow'
@@ -204,8 +203,6 @@ export class WorkflowBlockHandler implements BlockHandler {
logger.info(
`Loaded ${Object.keys(workflowVariables).length} variables for child workflow: ${workflowId}`
)
} else {
logger.debug(`No workflow variables found for child workflow: ${workflowId}`)
}
return {

View File

@@ -3,4 +3,4 @@
* Exports the DAG executor as the default executor
*/
export { DAGExecutor as Executor } from './execution/executor'
export { DAGExecutor as Executor } from '@/executor/execution/executor'

View File

@@ -1,5 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/consts'
import type { DAG } from '@/executor/dag/builder'
import type { LoopScope } from '@/executor/execution/state'
import type { BlockStateController } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { LoopConfigWithNodes } from '@/executor/types/loop'
import {
@@ -7,10 +10,8 @@ import {
buildSentinelStartId,
extractBaseBlockId,
} from '@/executor/utils/subflow-utils'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedLoop } from '@/serializer/types'
import type { DAG } from '../dag/builder'
import type { ExecutionState, LoopScope } from '../execution/state'
import type { VariableResolver } from '../variables/resolver'
const logger = createLogger('LoopOrchestrator')
@@ -27,7 +28,7 @@ export interface LoopContinuationResult {
export class LoopOrchestrator {
constructor(
private dag: DAG,
private state: ExecutionState,
private state: BlockStateController,
private resolver: VariableResolver
) {}
@@ -44,13 +45,11 @@ export class LoopOrchestrator {
}
const loopType = loopConfig.loopType
logger.debug('Initializing loop scope', { loopId, loopType })
switch (loopType) {
case 'for':
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
scope.condition = buildLoopIndexCondition(scope.maxIterations)
logger.debug('For loop initialized', { loopId, maxIterations: scope.maxIterations })
break
case 'forEach': {
@@ -59,13 +58,11 @@ export class LoopOrchestrator {
scope.maxIterations = items.length
scope.item = items[0]
scope.condition = buildLoopIndexCondition(scope.maxIterations)
logger.debug('ForEach loop initialized', { loopId, itemCount: items.length })
break
}
case 'while':
scope.condition = loopConfig.whileCondition
logger.debug('While loop initialized', { loopId, condition: scope.condition })
break
case 'doWhile':
@@ -76,14 +73,16 @@ export class LoopOrchestrator {
scope.condition = buildLoopIndexCondition(scope.maxIterations)
}
scope.skipFirstConditionCheck = true
logger.debug('DoWhile loop initialized', { loopId, condition: scope.condition })
break
default:
throw new Error(`Unknown loop type: ${loopType}`)
}
this.state.setLoopScope(loopId, scope)
if (!ctx.loopExecutions) {
ctx.loopExecutions = new Map()
}
ctx.loopExecutions.set(loopId, scope)
return scope
}
@@ -93,7 +92,7 @@ export class LoopOrchestrator {
nodeId: string,
output: NormalizedBlockOutput
): void {
const scope = this.state.getLoopScope(loopId)
const scope = ctx.loopExecutions?.get(loopId)
if (!scope) {
logger.warn('Loop scope not found for node output storage', { loopId, nodeId })
return
@@ -101,16 +100,10 @@ export class LoopOrchestrator {
const baseId = extractBaseBlockId(nodeId)
scope.currentIterationOutputs.set(baseId, output)
logger.debug('Stored loop node output', {
loopId,
nodeId: baseId,
iteration: scope.iteration,
outputsCount: scope.currentIterationOutputs.size,
})
}
evaluateLoopContinuation(ctx: ExecutionContext, loopId: string): LoopContinuationResult {
const scope = this.state.getLoopScope(loopId)
const scope = ctx.loopExecutions?.get(loopId)
if (!scope) {
logger.error('Loop scope not found during continuation evaluation', { loopId })
return {
@@ -120,6 +113,12 @@ export class LoopOrchestrator {
}
}
// Check for cancellation
if (ctx.isCancelled) {
logger.info('Loop execution cancelled', { loopId, iteration: scope.iteration })
return this.createExitResult(ctx, loopId, scope)
}
const iterationResults: NormalizedBlockOutput[] = []
for (const blockOutput of scope.currentIterationOutputs.values()) {
iterationResults.push(blockOutput)
@@ -127,11 +126,6 @@ export class LoopOrchestrator {
if (iterationResults.length > 0) {
scope.allIterationOutputs.push(iterationResults)
logger.debug('Collected iteration results', {
loopId,
iteration: scope.iteration,
resultsCount: iterationResults.length,
})
}
scope.currentIterationOutputs.clear()
@@ -140,25 +134,16 @@ export class LoopOrchestrator {
const shouldSkipFirstCheck = scope.skipFirstConditionCheck && isFirstIteration
if (!shouldSkipFirstCheck) {
if (!this.evaluateCondition(ctx, scope, scope.iteration + 1)) {
logger.debug('Loop condition false for next iteration - exiting', {
loopId,
currentIteration: scope.iteration,
nextIteration: scope.iteration + 1,
})
return this.createExitResult(ctx, loopId, scope)
}
}
scope.iteration++
if (scope.items && scope.iteration < scope.items.length) {
scope.item = scope.items[scope.iteration]
}
logger.debug('Loop will continue', {
loopId,
nextIteration: scope.iteration,
})
return {
shouldContinue: true,
shouldExit: false,
@@ -173,13 +158,7 @@ export class LoopOrchestrator {
scope: LoopScope
): LoopContinuationResult {
const results = scope.allIterationOutputs
ctx.blockStates?.set(loopId, {
output: { results },
executed: true,
executionTime: DEFAULTS.EXECUTION_TIME,
})
logger.debug('Loop exiting', { loopId, totalIterations: scope.iteration })
this.state.setBlockOutput(loopId, { results }, DEFAULTS.EXECUTION_TIME)
return {
shouldContinue: false,
@@ -210,7 +189,7 @@ export class LoopOrchestrator {
return result
}
clearLoopExecutionState(loopId: string, executedBlocks: Set<string>): void {
clearLoopExecutionState(loopId: string): void {
const loopConfig = this.dag.loopConfigs.get(loopId) as LoopConfigWithNodes | undefined
if (!loopConfig) {
logger.warn('Loop config not found for state clearing', { loopId })
@@ -221,16 +200,11 @@ export class LoopOrchestrator {
const sentinelEndId = buildSentinelEndId(loopId)
const loopNodes = loopConfig.nodes
executedBlocks.delete(sentinelStartId)
executedBlocks.delete(sentinelEndId)
this.state.unmarkExecuted(sentinelStartId)
this.state.unmarkExecuted(sentinelEndId)
for (const loopNodeId of loopNodes) {
executedBlocks.delete(loopNodeId)
this.state.unmarkExecuted(loopNodeId)
}
logger.debug('Cleared loop execution state', {
loopId,
nodesCleared: loopNodes.length + 2,
})
}
restoreLoopEdges(loopId: string): void {
@@ -267,15 +241,13 @@ export class LoopOrchestrator {
}
}
}
logger.debug('Restored loop edges', { loopId, edgesRestored: restoredCount })
}
getLoopScope(loopId: string): LoopScope | undefined {
return this.state.getLoopScope(loopId)
getLoopScope(ctx: ExecutionContext, loopId: string): LoopScope | undefined {
return ctx.loopExecutions?.get(loopId)
}
shouldExecuteLoopNode(nodeId: string, loopId: string, context: ExecutionContext): boolean {
shouldExecuteLoopNode(_ctx: ExecutionContext, _nodeId: string, _loopId: string): boolean {
return true
}
@@ -301,29 +273,45 @@ export class LoopOrchestrator {
try {
const referencePattern = /<([^>]+)>/g
let evaluatedCondition = condition
const replacements: Record<string, string> = {}
logger.info('Evaluating loop condition', {
originalCondition: condition,
iteration: scope.iteration,
workflowVariables: ctx.workflowVariables,
})
evaluatedCondition = evaluatedCondition.replace(referencePattern, (match) => {
const resolved = this.resolver.resolveSingleReference(ctx, '', match, scope)
logger.info('Resolved variable reference in loop condition', {
reference: match,
resolvedValue: resolved,
resolvedType: typeof resolved,
})
if (resolved !== undefined) {
// For booleans and numbers, return as-is (no quotes)
if (typeof resolved === 'boolean' || typeof resolved === 'number') {
return String(resolved)
}
// For strings that represent booleans, return without quotes
if (typeof resolved === 'string') {
replacements[match] = `"${resolved}"`
const lower = resolved.toLowerCase().trim()
if (lower === 'true' || lower === 'false') {
return lower
}
return `"${resolved}"`
}
replacements[match] = String(resolved)
return String(resolved)
// For other types, stringify them
return JSON.stringify(resolved)
}
return match
})
const result = Boolean(new Function(`return (${evaluatedCondition})`)())
logger.debug('Evaluated loop condition', {
condition,
replacements,
logger.info('Loop condition evaluation result', {
originalCondition: condition,
evaluatedCondition,
result,
iteration: scope.iteration,
})
return result
@@ -345,13 +333,19 @@ export class LoopOrchestrator {
if (typeof items === 'string') {
if (items.startsWith('<') && items.endsWith('>')) {
const resolved = this.resolver.resolveSingleReference(ctx, '', items)
return Array.isArray(resolved) ? resolved : []
if (Array.isArray(resolved)) {
return resolved
}
return []
}
try {
const normalized = items.replace(/'/g, '"')
const parsed = JSON.parse(normalized)
return Array.isArray(parsed) ? parsed : []
if (Array.isArray(parsed)) {
return parsed
}
return []
} catch (error) {
logger.error('Failed to parse forEach items', { items, error })
return []

View File

@@ -1,12 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger'
import { EDGE } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { BlockExecutor } from '@/executor/execution/block-executor'
import type { BlockStateController } from '@/executor/execution/types'
import type { LoopOrchestrator } from '@/executor/orchestrators/loop'
import type { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { DAG, DAGNode } from '../dag/builder'
import type { BlockExecutor } from '../execution/block-executor'
import type { ExecutionState } from '../execution/state'
import type { LoopOrchestrator } from './loop'
import type { ParallelOrchestrator } from './parallel'
const logger = createLogger('NodeExecutionOrchestrator')
@@ -19,20 +19,19 @@ export interface NodeExecutionResult {
export class NodeExecutionOrchestrator {
constructor(
private dag: DAG,
private state: ExecutionState,
private state: BlockStateController,
private blockExecutor: BlockExecutor,
private loopOrchestrator: LoopOrchestrator,
private parallelOrchestrator: ParallelOrchestrator
) {}
async executeNode(nodeId: string, context: any): Promise<NodeExecutionResult> {
async executeNode(ctx: ExecutionContext, nodeId: string): Promise<NodeExecutionResult> {
const node = this.dag.nodes.get(nodeId)
if (!node) {
throw new Error(`Node not found in DAG: ${nodeId}`)
}
if (this.state.hasExecuted(nodeId)) {
logger.debug('Node already executed, skipping', { nodeId })
const output = this.state.getBlockOutput(nodeId) || {}
return {
nodeId,
@@ -42,13 +41,11 @@ export class NodeExecutionOrchestrator {
}
const loopId = node.metadata.loopId
if (loopId && !this.loopOrchestrator.getLoopScope(loopId)) {
logger.debug('Initializing loop scope before first execution', { loopId, nodeId })
this.loopOrchestrator.initializeLoopScope(context, loopId)
if (loopId && !this.loopOrchestrator.getLoopScope(ctx, loopId)) {
this.loopOrchestrator.initializeLoopScope(ctx, loopId)
}
if (loopId && !this.loopOrchestrator.shouldExecuteLoopNode(nodeId, loopId, context)) {
logger.debug('Loop node should not execute', { nodeId, loopId })
if (loopId && !this.loopOrchestrator.shouldExecuteLoopNode(ctx, nodeId, loopId)) {
return {
nodeId,
output: {},
@@ -57,12 +54,7 @@ export class NodeExecutionOrchestrator {
}
if (node.metadata.isSentinel) {
logger.debug('Executing sentinel node', {
nodeId,
sentinelType: node.metadata.sentinelType,
loopId,
})
const output = this.handleSentinel(node, context)
const output = this.handleSentinel(ctx, node)
const isFinalOutput = node.outgoingEdges.size === 0
return {
nodeId,
@@ -71,8 +63,7 @@ export class NodeExecutionOrchestrator {
}
}
logger.debug('Executing node', { nodeId, blockType: node.block.metadata?.id })
const output = await this.blockExecutor.execute(context, node, node.block)
const output = await this.blockExecutor.execute(ctx, node, node.block)
const isFinalOutput = node.outgoingEdges.size === 0
return {
nodeId,
@@ -81,53 +72,51 @@ export class NodeExecutionOrchestrator {
}
}
private handleSentinel(node: DAGNode, context: any): NormalizedBlockOutput {
private handleSentinel(ctx: ExecutionContext, node: DAGNode): NormalizedBlockOutput {
const sentinelType = node.metadata.sentinelType
const loopId = node.metadata.loopId
if (sentinelType === 'start') {
logger.debug('Sentinel start - loop entry', { nodeId: node.id, loopId })
return { sentinelStart: true }
}
if (sentinelType === 'end') {
logger.debug('Sentinel end - evaluating loop continuation', { nodeId: node.id, loopId })
if (!loopId) {
logger.warn('Sentinel end called without loopId')
return { shouldExit: true, selectedRoute: EDGE.LOOP_EXIT }
switch (sentinelType) {
case 'start': {
return { sentinelStart: true }
}
const continuationResult = this.loopOrchestrator.evaluateLoopContinuation(context, loopId)
logger.debug('Loop continuation evaluated', {
loopId,
shouldContinue: continuationResult.shouldContinue,
shouldExit: continuationResult.shouldExit,
iteration: continuationResult.currentIteration,
})
case 'end': {
if (!loopId) {
logger.warn('Sentinel end called without loopId')
return { shouldExit: true, selectedRoute: EDGE.LOOP_EXIT }
}
const continuationResult = this.loopOrchestrator.evaluateLoopContinuation(ctx, loopId)
if (continuationResult.shouldContinue) {
return {
shouldContinue: true,
shouldExit: false,
selectedRoute: continuationResult.selectedRoute,
loopIteration: continuationResult.currentIteration,
}
}
if (continuationResult.shouldContinue) {
return {
shouldContinue: true,
shouldExit: false,
results: continuationResult.aggregatedResults || [],
shouldContinue: false,
shouldExit: true,
selectedRoute: continuationResult.selectedRoute,
loopIteration: continuationResult.currentIteration,
totalIterations: continuationResult.aggregatedResults?.length || 0,
}
}
return {
results: continuationResult.aggregatedResults || [],
shouldContinue: false,
shouldExit: true,
selectedRoute: continuationResult.selectedRoute,
totalIterations: continuationResult.aggregatedResults?.length || 0,
}
default:
logger.warn('Unknown sentinel type', { sentinelType })
return {}
}
logger.warn('Unknown sentinel type', { sentinelType })
return {}
}
async handleNodeCompletion(
ctx: ExecutionContext,
nodeId: string,
output: NormalizedBlockOutput,
context: any
output: NormalizedBlockOutput
): Promise<void> {
const node = this.dag.nodes.get(nodeId)
if (!node) {
@@ -135,74 +124,70 @@ export class NodeExecutionOrchestrator {
return
}
logger.debug('Handling node completion', {
nodeId: node.id,
hasLoopId: !!node.metadata.loopId,
isParallelBranch: !!node.metadata.isParallelBranch,
isSentinel: !!node.metadata.isSentinel,
})
const loopId = node.metadata.loopId
const isParallelBranch = node.metadata.isParallelBranch
const isSentinel = node.metadata.isSentinel
if (isSentinel) {
logger.debug('Handling sentinel node', { nodeId: node.id, loopId })
this.handleRegularNodeCompletion(node, output, context)
this.handleRegularNodeCompletion(ctx, node, output)
} else if (loopId) {
logger.debug('Handling loop node', { nodeId: node.id, loopId })
this.handleLoopNodeCompletion(node, output, loopId, context)
this.handleLoopNodeCompletion(ctx, node, output, loopId)
} else if (isParallelBranch) {
const parallelId = this.findParallelIdForNode(node.id)
if (parallelId) {
logger.debug('Handling parallel node', { nodeId: node.id, parallelId })
this.handleParallelNodeCompletion(node, output, parallelId)
this.handleParallelNodeCompletion(ctx, node, output, parallelId)
} else {
this.handleRegularNodeCompletion(node, output, context)
this.handleRegularNodeCompletion(ctx, node, output)
}
} else {
logger.debug('Handling regular node', { nodeId: node.id })
this.handleRegularNodeCompletion(node, output, context)
this.handleRegularNodeCompletion(ctx, node, output)
}
}
private handleLoopNodeCompletion(
ctx: ExecutionContext,
node: DAGNode,
output: NormalizedBlockOutput,
loopId: string,
context: ExecutionContext
loopId: string
): void {
this.loopOrchestrator.storeLoopNodeOutput(context, loopId, node.id, output)
this.loopOrchestrator.storeLoopNodeOutput(ctx, loopId, node.id, output)
this.state.setBlockOutput(node.id, output)
}
private handleParallelNodeCompletion(
ctx: ExecutionContext,
node: DAGNode,
output: NormalizedBlockOutput,
parallelId: string
): void {
const scope = this.parallelOrchestrator.getParallelScope(parallelId)
const scope = this.parallelOrchestrator.getParallelScope(ctx, parallelId)
if (!scope) {
const totalBranches = node.metadata.branchTotal || 1
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
const nodesInParallel = (parallelConfig as any)?.nodes?.length || 1
this.parallelOrchestrator.initializeParallelScope(parallelId, totalBranches, nodesInParallel)
this.parallelOrchestrator.initializeParallelScope(
ctx,
parallelId,
totalBranches,
nodesInParallel
)
}
const allComplete = this.parallelOrchestrator.handleParallelBranchCompletion(
ctx,
parallelId,
node.id,
output
)
if (allComplete) {
this.parallelOrchestrator.aggregateParallelResults(parallelId)
this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
}
this.state.setBlockOutput(node.id, output)
}
private handleRegularNodeCompletion(
ctx: ExecutionContext,
node: DAGNode,
output: NormalizedBlockOutput,
context: any
output: NormalizedBlockOutput
): void {
this.state.setBlockOutput(node.id, output)
@@ -213,8 +198,7 @@ export class NodeExecutionOrchestrator {
) {
const loopId = node.metadata.loopId
if (loopId) {
logger.debug('Preparing loop for next iteration', { loopId })
this.loopOrchestrator.clearLoopExecutionState(loopId, this.state.executedBlocks)
this.loopOrchestrator.clearLoopExecutionState(loopId)
this.loopOrchestrator.restoreLoopEdges(loopId)
}
}

View File

@@ -1,5 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { NormalizedBlockOutput } from '@/executor/types'
import type { DAG } from '@/executor/dag/builder'
import type { ParallelScope } from '@/executor/execution/state'
import type { BlockStateWriter } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
import {
calculateBranchCount,
@@ -8,8 +11,6 @@ import {
parseDistributionItems,
} from '@/executor/utils/subflow-utils'
import type { SerializedParallel } from '@/serializer/types'
import type { DAG } from '../dag/builder'
import type { ExecutionState, ParallelScope } from '../execution/state'
const logger = createLogger('ParallelOrchestrator')
@@ -30,10 +31,11 @@ export interface ParallelAggregationResult {
export class ParallelOrchestrator {
constructor(
private dag: DAG,
private state: ExecutionState
private state: BlockStateWriter
) {}
initializeParallelScope(
ctx: ExecutionContext,
parallelId: string,
totalBranches: number,
terminalNodesCount = 1
@@ -45,22 +47,20 @@ export class ParallelOrchestrator {
completedCount: 0,
totalExpectedNodes: totalBranches * terminalNodesCount,
}
this.state.setParallelScope(parallelId, scope)
logger.debug('Initialized parallel scope', {
parallelId,
totalBranches,
terminalNodesCount,
totalExpectedNodes: scope.totalExpectedNodes,
})
if (!ctx.parallelExecutions) {
ctx.parallelExecutions = new Map()
}
ctx.parallelExecutions.set(parallelId, scope)
return scope
}
handleParallelBranchCompletion(
ctx: ExecutionContext,
parallelId: string,
nodeId: string,
output: NormalizedBlockOutput
): boolean {
const scope = this.state.getParallelScope(parallelId)
const scope = ctx.parallelExecutions?.get(parallelId)
if (!scope) {
logger.warn('Parallel scope not found for branch completion', { parallelId, nodeId })
return false
@@ -77,27 +77,13 @@ export class ParallelOrchestrator {
}
scope.branchOutputs.get(branchIndex)!.push(output)
scope.completedCount++
logger.debug('Recorded parallel branch output', {
parallelId,
branchIndex,
nodeId,
completedCount: scope.completedCount,
totalExpected: scope.totalExpectedNodes,
})
const allComplete = scope.completedCount >= scope.totalExpectedNodes
if (allComplete) {
logger.debug('All parallel branches completed', {
parallelId,
totalBranches: scope.totalBranches,
completedNodes: scope.completedCount,
})
}
return allComplete
}
aggregateParallelResults(parallelId: string): ParallelAggregationResult {
const scope = this.state.getParallelScope(parallelId)
aggregateParallelResults(ctx: ExecutionContext, parallelId: string): ParallelAggregationResult {
const scope = ctx.parallelExecutions?.get(parallelId)
if (!scope) {
logger.error('Parallel scope not found for aggregation', { parallelId })
return { allBranchesComplete: false }
@@ -111,12 +97,6 @@ export class ParallelOrchestrator {
this.state.setBlockOutput(parallelId, {
results,
})
logger.debug('Aggregated parallel results', {
parallelId,
totalBranches: scope.totalBranches,
nodesPerBranch: results[0]?.length || 0,
totalOutputs: scope.completedCount,
})
return {
allBranchesComplete: true,
results,
@@ -151,8 +131,8 @@ export class ParallelOrchestrator {
}
}
getParallelScope(parallelId: string): ParallelScope | undefined {
return this.state.getParallelScope(parallelId)
getParallelScope(ctx: ExecutionContext, parallelId: string): ParallelScope | undefined {
return ctx.parallelExecutions?.get(parallelId)
}
findParallelIdForNode(baseNodeId: string): string | undefined {

View File

@@ -0,0 +1,73 @@
import { PARALLEL } from '@/executor/consts'
import type { ExecutionContext, LoopPauseScope, ParallelPauseScope } from '@/executor/types'
// Minimal shape of a DAG node's metadata needed for pause handling.
// Mirrors the fields consumed by generatePauseContextId and
// mapNodeMetadataToPauseScopes below; callers may pass richer objects.
interface NodeMetadataLike {
  // Fully-qualified node id in the execution DAG.
  nodeId: string
  // Present when the node runs inside a loop subflow.
  loopId?: string
  // Present when the node runs inside a parallel subflow.
  parallelId?: string
  // Zero-based branch index within the parallel subflow, if any.
  branchIndex?: number
  // Total number of parallel branches — presumably set alongside
  // branchIndex; TODO confirm it is always populated by the DAG builder.
  branchTotal?: number
}
/**
 * Builds a unique pause-context id for a block instance.
 *
 * The id starts from the base block id and is suffixed with the parallel
 * branch marker (when the node belongs to a parallel branch) and a loop
 * iteration marker (when a loop scope is supplied), so the same block can
 * pause independently per branch and per iteration.
 *
 * @param baseBlockId - Base (unsuffixed) block id.
 * @param nodeMetadata - Node metadata; only branchIndex is consulted here.
 * @param loopScope - Optional loop scope whose iteration tags the id.
 * @returns The composed pause-context id.
 */
export function generatePauseContextId(
  baseBlockId: string,
  nodeMetadata: NodeMetadataLike,
  loopScope?: LoopPauseScope
): string {
  const segments: string[] = [baseBlockId]
  const { branchIndex } = nodeMetadata
  // Only a numeric branchIndex marks a parallel branch; 0 is a valid index.
  if (typeof branchIndex === 'number') {
    segments.push(`${PARALLEL.BRANCH.PREFIX}${branchIndex}${PARALLEL.BRANCH.SUFFIX}`)
  }
  if (loopScope) {
    segments.push(`_loop${loopScope.iteration}`)
  }
  return segments.join('')
}
/**
 * Derives the trigger block id paired with a response node id.
 *
 * Handles three naming schemes, in order:
 *  1. ids containing a double-underscore "__response" marker anywhere —
 *     the first occurrence is swapped for "__trigger";
 *  2. ids ending in a single-underscore "_response" suffix — the suffix
 *     becomes "_trigger";
 *  3. anything else — "__trigger" is appended.
 *
 * @param nodeId - The response node id to translate.
 * @returns The corresponding trigger block id.
 */
export function buildTriggerBlockId(nodeId: string): string {
  const doubleMarker = '__response'
  if (nodeId.includes(doubleMarker)) {
    return nodeId.replace(doubleMarker, '__trigger')
  }
  const singleSuffix = '_response'
  if (nodeId.endsWith(singleSuffix)) {
    return `${nodeId.slice(0, -singleSuffix.length)}_trigger`
  }
  return `${nodeId}__trigger`
}
/**
 * Translates a node's DAG metadata into pause-scope descriptors.
 *
 * A parallel scope is produced only when the node carries both a
 * parallelId and a numeric branchIndex. A loop scope is produced when the
 * node carries a loopId; its iteration is read from the live loop
 * execution state in the context, defaulting to 0 when no scope has been
 * initialized yet.
 *
 * @param ctx - Execution context holding live loop execution state.
 * @param nodeMetadata - The node's subflow metadata.
 * @returns Optional parallel and loop pause scopes for the node.
 */
export function mapNodeMetadataToPauseScopes(
  ctx: ExecutionContext,
  nodeMetadata: NodeMetadataLike
): {
  parallelScope?: ParallelPauseScope
  loopScope?: LoopPauseScope
} {
  const { parallelId, branchIndex, branchTotal, loopId } = nodeMetadata

  const parallelScope: ParallelPauseScope | undefined =
    parallelId && typeof branchIndex === 'number'
      ? { parallelId, branchIndex, branchTotal }
      : undefined

  let loopScope: LoopPauseScope | undefined
  if (loopId) {
    // Iteration falls back to 0 for a loop whose scope is not yet tracked.
    const iteration = ctx.loopExecutions?.get(loopId)?.iteration ?? 0
    loopScope = { loopId, iteration }
  }

  return { parallelScope, loopScope }
}

View File

@@ -2,9 +2,6 @@ import type { TraceSpan } from '@/lib/logs/types'
import type { BlockOutput } from '@/blocks/types'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
/**
* User-facing file object with simplified interface
*/
export interface UserFile {
id: string
name: string
@@ -15,14 +12,62 @@ export interface UserFile {
context?: string
}
/**
* Standardized block output format that ensures compatibility with the execution engine.
*/
// Identifies the parallel branch a paused block belongs to.
export interface ParallelPauseScope {
  parallelId: string
  // Zero-based index of the branch that paused.
  branchIndex: number
  // Total branch count — presumably optional because older pause records
  // may lack it; TODO confirm.
  branchTotal?: number
}

// Identifies the loop iteration a paused block belongs to.
export interface LoopPauseScope {
  loopId: string
  iteration: number
}

// Metadata attached to a block output when the block pauses execution
// (carried on NormalizedBlockOutput._pauseMetadata).
export interface PauseMetadata {
  // Unique id for this pause context (block id plus branch/iteration suffixes).
  contextId: string
  blockId: string
  // Payload produced at pause time; shape is block-specific.
  response: any
  // ISO timestamp of when the pause was recorded.
  timestamp: string
  parallelScope?: ParallelPauseScope
  loopScope?: LoopPauseScope
  // Links callers can use to resume this specific pause point.
  resumeLinks?: {
    apiUrl: string
    uiUrl: string
    contextId: string
    executionId: string
    workflowId: string
  }
}

// Lifecycle states of a pause point from pause through resume.
export type ResumeStatus = 'paused' | 'resumed' | 'failed' | 'queued' | 'resuming'

// A registered pause point within a (possibly multi-pause) execution.
export interface PausePoint {
  contextId: string
  blockId?: string
  // Payload produced at pause time; shape is block-specific.
  response: any
  // ISO timestamp of when the pause point was registered.
  registeredAt: string
  resumeStatus: ResumeStatus
  // Whether the execution snapshot needed to resume has been persisted.
  snapshotReady: boolean
  parallelScope?: ParallelPauseScope
  loopScope?: LoopPauseScope
  // Links callers can use to resume this specific pause point.
  resumeLinks?: {
    apiUrl: string
    uiUrl: string
    contextId: string
    executionId: string
    workflowId: string
  }
}

// Serialized execution snapshot used to seed a later resume.
export interface SerializedSnapshot {
  // Opaque serialized execution state.
  snapshot: string
  // Ids of the trigger blocks associated with this snapshot.
  triggerIds: string[]
}
export interface NormalizedBlockOutput {
[key: string]: any
// Content fields
content?: string // Text content from LLM responses
model?: string // Model identifier used for generation
content?: string
model?: string
tokens?: {
prompt?: number
completion?: number
@@ -32,131 +77,122 @@ export interface NormalizedBlockOutput {
list: any[]
count: number
}
// File fields
files?: UserFile[] // Binary files/attachments from this block
// Path selection fields
files?: UserFile[]
selectedPath?: {
blockId: string
blockType?: string
blockTitle?: string
}
selectedConditionId?: string // ID of selected condition
conditionResult?: boolean // Whether condition evaluated to true
// Generic result fields
result?: any // Generic result value
stdout?: string // Standard output from function execution
executionTime?: number // Time taken to execute
// API response fields
data?: any // Response data from API calls
status?: number // HTTP status code
headers?: Record<string, string> // HTTP headers
// Error handling
error?: string // Error message if block execution failed
// Child workflow introspection (for workflow blocks)
selectedConditionId?: string
conditionResult?: boolean
result?: any
stdout?: string
executionTime?: number
data?: any
status?: number
headers?: Record<string, string>
error?: string
childTraceSpans?: TraceSpan[]
childWorkflowName?: string
_pauseMetadata?: PauseMetadata
}
/**
* Execution log entry for a single block.
*/
export interface BlockLog {
blockId: string // Unique identifier of the executed block
blockName?: string // Display name of the block
blockType?: string // Type of the block (agent, router, etc.)
startedAt: string // ISO timestamp when execution started
endedAt: string // ISO timestamp when execution completed
durationMs: number // Duration of execution in milliseconds
success: boolean // Whether execution completed successfully
output?: any // Output data from successful execution
input?: any // Input data for the block execution
error?: string // Error message if execution failed
loopId?: string // Loop ID if this block is part of a loop
parallelId?: string // Parallel ID if this block is part of a parallel
iterationIndex?: number // Iteration number for loop/parallel blocks
blockId: string
blockName?: string
blockType?: string
startedAt: string
endedAt: string
durationMs: number
success: boolean
output?: any
input?: any
error?: string
loopId?: string
parallelId?: string
iterationIndex?: number
}
/**
* Timing metadata for workflow execution.
*/
export interface ExecutionMetadata {
startTime?: string // ISO timestamp when workflow execution started
endTime?: string // ISO timestamp when workflow execution completed
duration: number // Duration of workflow execution in milliseconds
pendingBlocks?: string[] // List of block IDs that are pending execution
isDebugSession?: boolean // Whether the workflow is running in debug mode
context?: ExecutionContext // Runtime context for the workflow
workflowConnections?: Array<{ source: string; target: string }> // Connections between workflow blocks
requestId?: string
workflowId?: string
workspaceId?: string
startTime?: string
endTime?: string
duration: number
pendingBlocks?: string[]
isDebugSession?: boolean
context?: ExecutionContext
workflowConnections?: Array<{ source: string; target: string }>
status?: 'running' | 'paused' | 'completed'
pausePoints?: string[]
resumeChain?: {
parentExecutionId?: string
depth: number
}
userId?: string
executionId?: string
triggerType?: string
triggerBlockId?: string
useDraftState?: boolean
resumeFromSnapshot?: boolean
}
/**
* Current state of a block during workflow execution.
*/
export interface BlockState {
output: NormalizedBlockOutput // Current output data from the block
executed: boolean // Whether the block has been executed
executionTime: number // Time taken to execute in milliseconds
output: NormalizedBlockOutput
executed: boolean
executionTime: number
}
/**
* Runtime context for workflow execution.
*/
export interface ExecutionContext {
workflowId: string // Unique identifier for this workflow execution
workspaceId?: string // Workspace ID for file storage scoping
executionId?: string // Unique execution ID for file storage scoping
userId?: string // User ID for file storage attribution
// Whether this execution is running against deployed state (API/webhook/schedule/chat)
// Manual executions in the builder should leave this undefined/false
workflowId: string
workspaceId?: string
executionId?: string
userId?: string
isDeployedContext?: boolean
// CONSOLIDATED STATE - Single source of truth for execution state
// Uses shared references with ExecutionState class
blockStates: Map<string, BlockState>
executedBlocks: Set<string> // Set of block IDs that have been executed
blockStates: ReadonlyMap<string, BlockState>
executedBlocks: ReadonlySet<string>
blockLogs: BlockLog[] // Chronological log of block executions
metadata: ExecutionMetadata // Timing metadata for the execution
environmentVariables: Record<string, string> // Environment variables available during execution
workflowVariables?: Record<string, any> // Workflow variables available during execution
blockLogs: BlockLog[]
metadata: ExecutionMetadata
environmentVariables: Record<string, string>
workflowVariables?: Record<string, any>
// Routing decisions for path determination
decisions: {
router: Map<string, string> // Router block ID -> Target block ID
condition: Map<string, string> // Condition block ID -> Selected condition ID
router: Map<string, string>
condition: Map<string, string>
}
loopIterations: Map<string, number> // Tracks current iteration count for each loop
loopItems: Map<string, any> // Tracks current item for forEach loops and parallel distribution
completedLoops: Set<string> // Tracks which loops have completed all iterations
completedLoops: Set<string>
loopExecutions?: Map<
string,
{
iteration: number
currentIterationOutputs: Map<string, any>
allIterationOutputs: any[][]
maxIterations?: number
item?: any
items?: any[]
condition?: string
skipFirstConditionCheck?: boolean
loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
}
>
// Parallel execution tracking
parallelExecutions?: Map<
string,
{
parallelCount: number
distributionItems: any[] | Record<string, any> | null
completedExecutions: number
executionResults: Map<string, any>
activeIterations: Set<number>
currentIteration: number
parallelId: string
totalBranches: number
branchOutputs: Map<number, any[]>
completedCount: number
totalExpectedNodes: number
parallelType?: 'count' | 'collection'
}
>
// Loop execution tracking
loopExecutions?: Map<
string,
{
maxIterations: number
loopType: 'for' | 'forEach'
forEachItems?: any[] | Record<string, any> | null
executionResults: Map<string, any> // iteration_0, iteration_1, etc.
currentIteration: number
}
>
// Mapping for virtual parallel block IDs to their original blocks
parallelBlockMapping?: Map<
string,
{
@@ -166,19 +202,16 @@ export interface ExecutionContext {
}
>
// Current virtual block being executed (for parallel iterations)
currentVirtualBlockId?: string
activeExecutionPath: Set<string> // Set of block IDs in the current execution path
activeExecutionPath: Set<string>
workflow?: SerializedWorkflow // Reference to the workflow being executed
workflow?: SerializedWorkflow
// Streaming support and output selection
stream?: boolean // Whether to use streaming responses when available
selectedOutputs?: string[] // IDs of blocks selected for streaming output
edges?: Array<{ source: string; target: string }> // Workflow edge connections
stream?: boolean
selectedOutputs?: string[]
edges?: Array<{ source: string; target: string }>
// New context extensions
onStream?: (streamingExecution: StreamingExecution) => Promise<void>
onBlockStart?: (blockId: string, blockName: string, blockType: string) => Promise<void>
onBlockComplete?: (
@@ -187,45 +220,34 @@ export interface ExecutionContext {
blockType: string,
output: any
) => Promise<void>
// Cancellation support
isCancelled?: boolean
}
/**
* Complete result from executing a workflow.
*/
export interface ExecutionResult {
success: boolean // Whether the workflow executed successfully
output: NormalizedBlockOutput // Final output data from the workflow
error?: string // Error message if execution failed
logs?: BlockLog[] // Execution logs for all blocks
success: boolean
output: NormalizedBlockOutput
error?: string
logs?: BlockLog[]
metadata?: ExecutionMetadata
status?: 'completed' | 'paused'
pausePoints?: PausePoint[]
snapshotSeed?: SerializedSnapshot
_streamingMetadata?: {
// Internal metadata for streaming execution
loggingSession: any
processedInput: any
}
}
/**
* Streaming execution result combining a readable stream with execution metadata.
* This allows us to stream content to the UI while still capturing all execution logs.
*/
export interface StreamingExecution {
stream: ReadableStream // The streaming response for the UI to consume
execution: ExecutionResult & { isStreaming?: boolean } // The complete execution data for logging purposes
stream: ReadableStream
execution: ExecutionResult & { isStreaming?: boolean }
}
/**
* Interface for a block executor component.
*/
export interface BlockExecutor {
/**
* Determines if this executor can process the given block.
*/
canExecute(block: SerializedBlock): boolean
/**
* Executes the block with the given inputs and context.
*/
execute(
block: SerializedBlock,
inputs: Record<string, any>,
@@ -233,17 +255,7 @@ export interface BlockExecutor {
): Promise<BlockOutput>
}
/**
* Interface for block handlers that execute specific block types.
* Each handler is responsible for executing a particular type of block.
*/
export interface BlockHandler {
/**
* Determines if this handler can process the given block.
*
* @param block - Block to check
* @returns True if this handler can process the block
*/
canHandle(block: SerializedBlock): boolean
execute(
@@ -251,39 +263,43 @@ export interface BlockHandler {
block: SerializedBlock,
inputs: Record<string, any>
): Promise<BlockOutput | StreamingExecution>
executeWithNode?: (
ctx: ExecutionContext,
block: SerializedBlock,
inputs: Record<string, any>,
nodeMetadata: {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
}
) => Promise<BlockOutput | StreamingExecution>
}
/**
* Definition of a tool that can be invoked by blocks.
*
* @template P - Parameter type for the tool
* @template O - Output type from the tool
*/
export interface Tool<P = any, O = Record<string, any>> {
id: string // Unique identifier for the tool
name: string // Display name of the tool
description: string // Description of what the tool does
version: string // Version string for the tool
id: string
name: string
description: string
version: string
// Parameter definitions for the tool
params: {
[key: string]: {
type: string // Data type of the parameter
required?: boolean // Whether the parameter is required
description?: string // Description of the parameter
default?: any // Default value if not provided
type: string
required?: boolean
description?: string
default?: any
}
}
// HTTP request configuration for API tools
request?: {
url?: string | ((params: P) => string) // URL or function to generate URL
method?: string // HTTP method to use
headers?: (params: P) => Record<string, string> // Function to generate request headers
body?: (params: P) => Record<string, any> // Function to generate request body
url?: string | ((params: P) => string)
method?: string
headers?: (params: P) => Record<string, string>
body?: (params: P) => Record<string, any>
}
// Function to transform API response to tool output
transformResponse?: (response: any) => Promise<{
success: boolean
output: O
@@ -291,16 +307,10 @@ export interface Tool<P = any, O = Record<string, any>> {
}>
}
/**
* Registry of available tools indexed by ID.
*/
export interface ToolRegistry {
[key: string]: Tool
}
/**
* Interface for a stream processor that can process a stream based on a response format.
*/
export interface ResponseFormatStreamProcessor {
processStream(
originalStream: ReadableStream,

View File

@@ -5,14 +5,14 @@ export interface BlockDataCollection {
blockNameMapping: Record<string, string>
}
export function collectBlockData(context: ExecutionContext): BlockDataCollection {
export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
const blockData: Record<string, any> = {}
const blockNameMapping: Record<string, string> = {}
for (const [id, state] of context.blockStates.entries()) {
for (const [id, state] of ctx.blockStates.entries()) {
if (state.output !== undefined) {
blockData[id] = state.output
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === id)
const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
if (workflowBlock?.metadata?.name) {
blockNameMapping[workflowBlock.metadata.name] = id
const normalized = workflowBlock.metadata.name.replace(/\s+/g, '').toLowerCase()

View File

@@ -57,7 +57,6 @@ export class ConnectionUtils {
scopeNodes.includes(conn.source)
)
// Has external connections if total incoming > internal connections
return incomingConnections.length > internalConnections.length
}
@@ -74,10 +73,9 @@ export class ConnectionUtils {
ConnectionUtils.getInternalConnections(nodeId, scopeNodes, connections).length > 0
if (hasInternalConnections) {
return false // Has internal connections, not an entry point
return false
}
// Only entry point if it has external connections (not completely unconnected)
return ConnectionUtils.hasExternalConnections(nodeId, scopeNodes, connections)
}
}

View File

@@ -25,7 +25,6 @@ export class FileToolProcessor {
const processedOutput = { ...toolOutput }
// Process each output that's marked as file or file[]
for (const [outputKey, outputDef] of Object.entries(toolConfig.outputs)) {
if (!FileToolProcessor.isFileOutput(outputDef.type)) {
continue
@@ -101,7 +100,33 @@ export class FileToolProcessor {
context: ExecutionContext
): Promise<UserFile> {
try {
if (fileData.url) {
let buffer: Buffer | null = null
if (Buffer.isBuffer(fileData.data)) {
buffer = fileData.data
} else if (
fileData.data &&
typeof fileData.data === 'object' &&
'type' in fileData.data &&
'data' in fileData.data
) {
const serializedBuffer = fileData.data as { type: string; data: number[] }
if (serializedBuffer.type === 'Buffer' && Array.isArray(serializedBuffer.data)) {
buffer = Buffer.from(serializedBuffer.data)
} else {
throw new Error(`Invalid serialized buffer format for ${fileData.name}`)
}
} else if (typeof fileData.data === 'string' && fileData.data) {
let base64Data = fileData.data
if (base64Data.includes('-') || base64Data.includes('_')) {
base64Data = base64Data.replace(/-/g, '+').replace(/_/g, '/')
}
buffer = Buffer.from(base64Data, 'base64')
}
if (!buffer && fileData.url) {
const response = await fetch(fileData.url)
if (!response.ok) {
@@ -109,8 +134,10 @@ export class FileToolProcessor {
}
const arrayBuffer = await response.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
buffer = Buffer.from(arrayBuffer)
}
if (buffer) {
if (buffer.length === 0) {
throw new Error(`File '${fileData.name}' has zero bytes`)
}
@@ -128,6 +155,12 @@ export class FileToolProcessor {
)
}
if (!fileData.data) {
throw new Error(
`File data for '${fileData.name}' must have either 'data' (Buffer/base64) or 'url' property`
)
}
return uploadFileFromRawData(
{
name: fileData.name,

View File

@@ -11,9 +11,6 @@ export function parseJSON<T>(value: unknown, fallback: T): T {
try {
return JSON.parse(value.trim())
} catch (error) {
logger.debug('Failed to parse JSON, using fallback', {
error: error instanceof Error ? error.message : String(error),
})
return fallback
}
}

View File

@@ -1,14 +1,14 @@
import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, REFERENCE } from '@/executor/consts'
import type { ExecutionState, LoopScope } from '@/executor/execution/state'
import type { ExecutionContext } from '@/executor/types'
import { BlockResolver } from '@/executor/variables/resolvers/block'
import { EnvResolver } from '@/executor/variables/resolvers/env'
import { LoopResolver } from '@/executor/variables/resolvers/loop'
import { ParallelResolver } from '@/executor/variables/resolvers/parallel'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import { WorkflowResolver } from '@/executor/variables/resolvers/workflow'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import type { ExecutionState, LoopScope } from '../execution/state'
import { BlockResolver } from './resolvers/block'
import { EnvResolver } from './resolvers/env'
import { LoopResolver } from './resolvers/loop'
import { ParallelResolver } from './resolvers/parallel'
import type { ResolutionContext, Resolver } from './resolvers/reference'
import { WorkflowResolver } from './resolvers/workflow'
const logger = createLogger('VariableResolver')
@@ -17,8 +17,8 @@ export class VariableResolver {
private blockResolver: BlockResolver
constructor(
private workflow: SerializedWorkflow,
private workflowVariables: Record<string, any>,
workflow: SerializedWorkflow,
workflowVariables: Record<string, any>,
private state: ExecutionState
) {
this.blockResolver = new BlockResolver(workflow)
@@ -93,6 +93,20 @@ export class VariableResolver {
reference: string,
loopScope?: LoopScope
): any {
if (typeof reference === 'string') {
const trimmed = reference.trim()
if (/^<[^<>]+>$/.test(trimmed)) {
const resolutionContext: ResolutionContext = {
executionContext: ctx,
executionState: this.state,
currentNodeId,
loopScope,
}
return this.resolveReference(trimmed, resolutionContext)
}
}
return this.resolveValue(ctx, currentNodeId, reference, loopScope)
}
@@ -182,10 +196,6 @@ export class VariableResolver {
return result
}
/**
* Resolves template string but without condition-specific formatting.
* Used when resolving condition values that are already parsed from JSON.
*/
private resolveTemplateWithoutConditionFormatting(
ctx: ExecutionContext,
currentNodeId: string,
@@ -215,17 +225,13 @@ export class VariableResolver {
return match
}
// Format value for JavaScript evaluation
// Strings need to be quoted, objects need JSON.stringify
if (typeof resolved === 'string') {
// Escape backslashes first, then single quotes, then wrap in single quotes
const escaped = resolved.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
return `'${escaped}'`
}
if (typeof resolved === 'object' && resolved !== null) {
return JSON.stringify(resolved)
}
// For numbers, booleans, null, undefined - use as-is
return String(resolved)
} catch (error) {
replacementError = error instanceof Error ? error : new Error(String(error))
@@ -249,11 +255,6 @@ export class VariableResolver {
for (const resolver of this.resolvers) {
if (resolver.canResolve(reference)) {
const result = resolver.resolve(reference, context)
logger.debug('Reference resolved', {
reference,
resolver: resolver.constructor.name,
result,
})
return result
}
}

View File

@@ -1,10 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, SPECIAL_REFERENCE_PREFIXES } from '@/executor/consts'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types'
import { normalizeBlockName } from '@/stores/workflows/utils'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('BlockResolver')
export class BlockResolver implements Resolver {
private blockByNormalizedName: Map<string, string>
@@ -38,25 +35,13 @@ export class BlockResolver implements Resolver {
return undefined
}
const [blockName, ...pathParts] = parts
logger.debug('Resolving block reference', {
reference,
blockName,
pathParts,
})
const blockId = this.findBlockIdByName(blockName)
if (!blockId) {
logger.debug('Block not found by name, skipping resolution', { blockName, reference })
return undefined
}
const output = this.getBlockOutput(blockId, context)
logger.debug('Block output retrieved', {
blockName,
blockId,
hasOutput: !!output,
outputKeys: output ? Object.keys(output) : [],
})
if (!output) {
throw new Error(`No state found for block "${blockName}"`)
@@ -74,16 +59,11 @@ export class BlockResolver implements Resolver {
)
}
logger.debug('Navigated path result', {
blockName,
pathParts,
result,
})
return result
}
private getBlockOutput(blockId: string, context: ResolutionContext): any {
const stateOutput = context.executionState.getBlockOutput(blockId)
const stateOutput = context.executionState.getBlockOutput(blockId, context.currentNodeId)
if (stateOutput !== undefined) {
return stateOutput
}
@@ -164,7 +144,7 @@ export class BlockResolver implements Resolver {
return value
}
formatValueForBlock(
public formatValueForBlock(
value: any,
blockType: string | undefined,
isInTemplateLiteral = false

View File

@@ -1,6 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import { extractEnvVarName, isEnvVarReference } from '@/executor/consts'
import type { ResolutionContext, Resolver } from './reference'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
const logger = createLogger('EnvResolver')
@@ -14,7 +14,6 @@ export class EnvResolver implements Resolver {
const value = context.executionContext.environmentVariables?.[varName]
if (value === undefined) {
logger.debug('Environment variable not found, returning original reference', { varName })
return reference
}
return value

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('LoopResolver')
@@ -34,10 +34,9 @@ export class LoopResolver implements Resolver {
if (!loopScope) {
const loopId = this.findLoopForBlock(context.currentNodeId)
if (!loopId) {
logger.debug('Block not in a loop', { nodeId: context.currentNodeId })
return undefined
}
loopScope = context.executionState.getLoopScope(loopId)
loopScope = context.executionContext.loopExecutions?.get(loopId)
}
if (!loopScope) {

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('ParallelResolver')
@@ -31,7 +31,6 @@ export class ParallelResolver implements Resolver {
const [_, property] = parts
const parallelId = this.findParallelForBlock(context.currentNodeId)
if (!parallelId) {
logger.debug('Block not in a parallel', { nodeId: context.currentNodeId })
return undefined
}
@@ -43,7 +42,6 @@ export class ParallelResolver implements Resolver {
const branchIndex = extractBranchIndex(context.currentNodeId)
if (branchIndex === null) {
logger.debug('Node ID does not have branch index', { nodeId: context.currentNodeId })
return undefined
}

View File

@@ -1,5 +1,5 @@
import type { ExecutionState, LoopScope } from '@/executor/execution/state'
import type { ExecutionContext } from '@/executor/types'
import type { ExecutionState, LoopScope } from '../../execution/state'
export interface ResolutionContext {
executionContext: ExecutionContext
executionState: ExecutionState

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger'
import { VariableManager } from '@/lib/variables/variable-manager'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import type { ResolutionContext, Resolver } from './reference'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
const logger = createLogger('WorkflowResolver')
@@ -28,22 +29,24 @@ export class WorkflowResolver implements Resolver {
const [_, variableName] = parts
if (context.executionContext.workflowVariables) {
for (const varObj of Object.values(context.executionContext.workflowVariables)) {
const v = varObj as any
if (v.name === variableName || v.id === variableName) {
const workflowVars = context.executionContext.workflowVariables || this.workflowVariables
for (const varObj of Object.values(workflowVars)) {
const v = varObj as any
if (v && (v.name === variableName || v.id === variableName)) {
const normalizedType = (v.type === 'string' ? 'plain' : v.type) || 'plain'
try {
return VariableManager.resolveForExecution(v.value, normalizedType)
} catch (error) {
logger.warn('Failed to resolve workflow variable, returning raw value', {
variableName,
error: (error as Error).message,
})
return v.value
}
}
}
for (const varObj of Object.values(this.workflowVariables)) {
const v = varObj as any
if (v.name === variableName || v.id === variableName) {
return v.value
}
}
logger.debug('Workflow variable not found', { variableName })
return undefined
}
}

View File

@@ -4,7 +4,6 @@ import { useSession } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useSocket } from '@/contexts/socket-context'
import { useUndoRedo } from '@/hooks/use-undo-redo'
import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store'
@@ -778,9 +777,7 @@ export function useCollaborativeWorkflow() {
// Get outputs based on trigger mode
const isTriggerMode = triggerMode || false
const outputs = isTriggerMode
? getBlockOutputs(type, subBlocks, isTriggerMode)
: resolveOutputType(blockConfig.outputs)
const outputs = getBlockOutputs(type, subBlocks, isTriggerMode)
const completeBlockData = {
id,

View File

@@ -9,7 +9,6 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/custom-tools-persi
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { validateWorkflowState } from '@/lib/workflows/validation'
import { getAllBlocks } from '@/blocks/registry'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
interface EditWorkflowOperation {
@@ -134,9 +133,7 @@ function createBlockFromParams(blockId: string, params: any, parentId?: string):
subBlocks[key] = { id: key, type: 'short-input', value: value }
})
}
outputs = triggerMode
? getBlockOutputs(params.type, subBlocks, triggerMode)
: resolveOutputType(blockConfig.outputs)
outputs = getBlockOutputs(params.type, subBlocks, triggerMode)
} else {
outputs = {}
}

View File

@@ -42,6 +42,67 @@ export interface ToolCall {
const logger = createLogger('ExecutionLogger')
export class ExecutionLogger implements IExecutionLoggerService {
private mergeTraceSpans(existing: TraceSpan[], additional: TraceSpan[]): TraceSpan[] {
// If no existing spans, just return additional
if (!existing || existing.length === 0) return additional
if (!additional || additional.length === 0) return existing
// Find the root "Workflow Execution" span in both arrays
const existingRoot = existing.find((s) => s.name === 'Workflow Execution')
const additionalRoot = additional.find((s) => s.name === 'Workflow Execution')
if (!existingRoot || !additionalRoot) {
// If we can't find both roots, just concatenate (fallback)
return [...existing, ...additional]
}
// Calculate the full duration from original start to resume end
const startTime = existingRoot.startTime
const endTime = additionalRoot.endTime || existingRoot.endTime
const fullDuration =
startTime && endTime
? new Date(endTime).getTime() - new Date(startTime).getTime()
: (existingRoot.duration || 0) + (additionalRoot.duration || 0)
// Merge the children of the workflow execution spans
const mergedRoot = {
...existingRoot,
children: [...(existingRoot.children || []), ...(additionalRoot.children || [])],
endTime,
duration: fullDuration,
}
// Return array with merged root plus any other top-level spans
const otherExisting = existing.filter((s) => s.name !== 'Workflow Execution')
const otherAdditional = additional.filter((s) => s.name !== 'Workflow Execution')
return [mergedRoot, ...otherExisting, ...otherAdditional]
}
private mergeCostModels(
existing: Record<string, any>,
additional: Record<string, any>
): Record<string, any> {
const merged = { ...existing }
for (const [model, costs] of Object.entries(additional)) {
if (merged[model]) {
merged[model] = {
input: (merged[model].input || 0) + (costs.input || 0),
output: (merged[model].output || 0) + (costs.output || 0),
total: (merged[model].total || 0) + (costs.total || 0),
tokens: {
prompt: (merged[model].tokens?.prompt || 0) + (costs.tokens?.prompt || 0),
completion: (merged[model].tokens?.completion || 0) + (costs.tokens?.completion || 0),
total: (merged[model].tokens?.total || 0) + (costs.tokens?.total || 0),
},
}
} else {
merged[model] = costs
}
}
return merged
}
async startWorkflowExecution(params: {
workflowId: string
executionId: string
@@ -161,6 +222,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
finalOutput: BlockOutputData
traceSpans?: TraceSpan[]
workflowInput?: any
isResume?: boolean // If true, merge with existing data instead of replacing
}): Promise<WorkflowExecutionLog> {
const {
executionId,
@@ -170,9 +232,21 @@ export class ExecutionLogger implements IExecutionLoggerService {
finalOutput,
traceSpans,
workflowInput,
isResume,
} = params
logger.debug(`Completing workflow execution ${executionId}`)
logger.debug(`Completing workflow execution ${executionId}`, { isResume })
// If this is a resume, fetch the existing log to merge data
let existingLog: any = null
if (isResume) {
const [existing] = await db
.select()
.from(workflowExecutionLogs)
.where(eq(workflowExecutionLogs.executionId, executionId))
.limit(1)
existingLog = existing
}
// Determine if workflow failed by checking trace spans for errors
const hasErrors = traceSpans?.some((span: any) => {
@@ -191,29 +265,34 @@ export class ExecutionLogger implements IExecutionLoggerService {
// Extract files from trace spans, final output, and workflow input
const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput, workflowInput)
const filteredTraceSpans = filterForDisplay(traceSpans)
// For resume executions, rebuild trace spans from the aggregated logs
const mergedTraceSpans = isResume
? traceSpans && traceSpans.length > 0
? traceSpans
: existingLog?.executionData?.traceSpans || []
: traceSpans
const filteredTraceSpans = filterForDisplay(mergedTraceSpans)
const filteredFinalOutput = filterForDisplay(finalOutput)
const redactedTraceSpans = redactApiKeys(filteredTraceSpans)
const redactedFinalOutput = redactApiKeys(filteredFinalOutput)
const [updatedLog] = await db
.update(workflowExecutionLogs)
.set({
level,
endedAt: new Date(endedAt),
totalDurationMs,
files: executionFiles.length > 0 ? executionFiles : null,
executionData: {
traceSpans: redactedTraceSpans,
finalOutput: redactedFinalOutput,
tokenBreakdown: {
prompt: costSummary.totalPromptTokens,
completion: costSummary.totalCompletionTokens,
total: costSummary.totalTokens,
// Merge costs if resuming
const existingCost = isResume && existingLog?.cost ? existingLog.cost : null
const mergedCost = existingCost
? {
// For resume, add only the model costs, NOT the base execution charge again
total: (existingCost.total || 0) + costSummary.modelCost,
input: (existingCost.input || 0) + costSummary.totalInputCost,
output: (existingCost.output || 0) + costSummary.totalOutputCost,
tokens: {
prompt: (existingCost.tokens?.prompt || 0) + costSummary.totalPromptTokens,
completion: (existingCost.tokens?.completion || 0) + costSummary.totalCompletionTokens,
total: (existingCost.tokens?.total || 0) + costSummary.totalTokens,
},
models: costSummary.models,
},
cost: {
models: this.mergeCostModels(existingCost.models || {}, costSummary.models),
}
: {
total: costSummary.totalCost,
input: costSummary.totalInputCost,
output: costSummary.totalOutputCost,
@@ -223,7 +302,36 @@ export class ExecutionLogger implements IExecutionLoggerService {
total: costSummary.totalTokens,
},
models: costSummary.models,
}
// Merge files if resuming
const existingFiles = isResume && existingLog?.files ? existingLog.files : []
const mergedFiles = [...existingFiles, ...executionFiles]
// Calculate the actual total duration for resume executions
const actualTotalDuration =
isResume && existingLog?.startedAt
? new Date(endedAt).getTime() - new Date(existingLog.startedAt).getTime()
: totalDurationMs
const [updatedLog] = await db
.update(workflowExecutionLogs)
.set({
level,
endedAt: new Date(endedAt),
totalDurationMs: actualTotalDuration,
files: mergedFiles.length > 0 ? mergedFiles : null,
executionData: {
traceSpans: redactedTraceSpans,
finalOutput: redactedFinalOutput,
tokenBreakdown: {
prompt: mergedCost.tokens.prompt,
completion: mergedCost.tokens.completion,
total: mergedCost.tokens.total,
},
models: mergedCost.models,
},
cost: mergedCost,
})
.where(eq(workflowExecutionLogs.executionId, executionId))
.returning()

View File

@@ -21,6 +21,7 @@ export interface SessionStartParams {
workspaceId?: string
variables?: Record<string, string>
triggerData?: Record<string, unknown>
skipLogCreation?: boolean // For resume executions - reuse existing log entry
}
export interface SessionCompleteParams {
@@ -49,6 +50,7 @@ export class LoggingSession {
private trigger?: ExecutionTrigger
private environment?: ExecutionEnvironment
private workflowState?: WorkflowState
private isResume = false // Track if this is a resume execution
constructor(
workflowId: string,
@@ -63,7 +65,7 @@ export class LoggingSession {
}
async start(params: SessionStartParams = {}): Promise<void> {
const { userId, workspaceId, variables, triggerData } = params
const { userId, workspaceId, variables, triggerData, skipLogCreation } = params
try {
this.trigger = createTriggerObject(this.triggerType, triggerData)
@@ -76,16 +78,26 @@ export class LoggingSession {
)
this.workflowState = await loadWorkflowStateForExecution(this.workflowId)
await executionLogger.startWorkflowExecution({
workflowId: this.workflowId,
executionId: this.executionId,
trigger: this.trigger,
environment: this.environment,
workflowState: this.workflowState,
})
// Only create a new log entry if not resuming
if (!skipLogCreation) {
await executionLogger.startWorkflowExecution({
workflowId: this.workflowId,
executionId: this.executionId,
trigger: this.trigger,
environment: this.environment,
workflowState: this.workflowState,
})
if (this.requestId) {
logger.debug(`[${this.requestId}] Started logging for execution ${this.executionId}`)
if (this.requestId) {
logger.debug(`[${this.requestId}] Started logging for execution ${this.executionId}`)
}
} else {
this.isResume = true // Mark as resume
if (this.requestId) {
logger.debug(
`[${this.requestId}] Resuming logging for existing execution ${this.executionId}`
)
}
}
} catch (error) {
if (this.requestId) {
@@ -122,6 +134,7 @@ export class LoggingSession {
finalOutput: finalOutput || {},
traceSpans: traceSpans || [],
workflowInput,
isResume: this.isResume,
})
// Track workflow execution outcome

View File

@@ -167,6 +167,29 @@ export function getBlockOutputs(
return getUnifiedStartOutputs(subBlocks)
}
if (blockType === 'approval') {
// Start with only uiUrl (apiUrl commented out - not accessible as output)
const pauseResumeOutputs: Record<string, any> = {
uiUrl: { type: 'string', description: 'Resume UI URL' },
// apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
}
const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value)
// Add each input format field as a top-level output
for (const field of normalizedInputFormat) {
const fieldName = field?.name?.trim()
if (!fieldName) continue
pauseResumeOutputs[fieldName] = {
type: (field?.type || 'any') as any,
description: `Field from input format`,
}
}
return pauseResumeOutputs
}
if (startPath === StartBlockPath.LEGACY_STARTER) {
return getLegacyStarterOutputs(subBlocks)
}

View File

@@ -31,6 +31,7 @@ export interface ExecuteWorkflowCoreOptions {
snapshot: ExecutionSnapshot
callbacks: ExecutionCallbacks
loggingSession: LoggingSession
skipLogCreation?: boolean // For resume executions - reuse existing log entry
}
function parseVariableValueByType(value: any, type: string): any {
@@ -97,7 +98,7 @@ function parseVariableValueByType(value: any, type: string): any {
export async function executeWorkflowCore(
options: ExecuteWorkflowCoreOptions
): Promise<ExecutionResult> {
const { snapshot, callbacks, loggingSession } = options
const { snapshot, callbacks, loggingSession, skipLogCreation } = options
const { metadata, workflow, input, environmentVariables, workflowVariables, selectedOutputs } =
snapshot
const { requestId, workflowId, userId, triggerType, executionId, triggerBlockId, useDraftState } =
@@ -153,6 +154,7 @@ export async function executeWorkflowCore(
userId,
workspaceId: providedWorkspaceId,
variables,
skipLogCreation, // Skip if resuming an existing execution
})
// Process block states with env var substitution
@@ -225,8 +227,19 @@ export async function executeWorkflowCore(
const filteredEdges = filterEdgesFromTriggerBlocks(mergedStates, edges)
// Check if this is a resume execution before trigger resolution
const resumeFromSnapshot = (metadata as any).resumeFromSnapshot === true
const resumePendingQueue = snapshot.state?.pendingQueue
let resolvedTriggerBlockId = triggerBlockId
if (!triggerBlockId) {
// For resume executions, skip trigger resolution since we have a pending queue
if (resumeFromSnapshot && resumePendingQueue?.length) {
resolvedTriggerBlockId = undefined
logger.info(`[${requestId}] Skipping trigger resolution for resume execution`, {
pendingQueueLength: resumePendingQueue.length,
})
} else if (!triggerBlockId) {
const executionKind =
triggerType === 'api' || triggerType === 'chat' ? (triggerType as 'api' | 'chat') : 'manual'
@@ -263,6 +276,18 @@ export async function executeWorkflowCore(
processedInput = input || {}
// Create and execute workflow with callbacks
if (resumeFromSnapshot) {
logger.info(`[${requestId}] Resume execution detected`, {
resumePendingQueue,
hasState: !!snapshot.state,
stateBlockStatesCount: snapshot.state
? Object.keys(snapshot.state.blockStates || {}).length
: 0,
executedBlocksCount: snapshot.state?.executedBlocks?.length ?? 0,
useDraftState,
})
}
const contextExtensions: any = {
stream: !!onStream,
selectedOutputs,
@@ -273,6 +298,11 @@ export async function executeWorkflowCore(
onBlockStart,
onBlockComplete,
onStream,
resumeFromSnapshot,
resumePendingQueue,
remainingEdges: snapshot.state?.remainingEdges,
dagIncomingEdges: snapshot.state?.dagIncomingEdges,
snapshotState: snapshot.state,
}
const executorInstance = new Executor({
@@ -305,11 +335,11 @@ export async function executeWorkflowCore(
resolvedTriggerBlockId
)) as ExecutionResult
// Build trace spans for logging
// Build trace spans for logging from the full execution result
const { traceSpans, totalDuration } = buildTraceSpans(result)
// Update workflow run counts
if (result.success) {
if (result.success && result.status !== 'paused') {
await updateWorkflowRunCounts(workflowId)
}

File diff suppressed because it is too large Load Diff

View File

@@ -118,6 +118,7 @@ export interface WorkflowLog {
bucketName?: string
}>
cost?: CostMetadata
hasPendingPause?: boolean
executionData?: ToolCallMetadata & {
traceSpans?: TraceSpan[]
totalDuration?: number

View File

@@ -4,7 +4,6 @@ import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import {
@@ -120,9 +119,7 @@ export const useWorkflowStore = create<WorkflowStore>()(
// Get outputs based on trigger mode
const triggerMode = blockProperties?.triggerMode ?? false
const outputs = triggerMode
? getBlockOutputs(type, subBlocks, triggerMode)
: resolveOutputType(blockConfig.outputs)
const outputs = getBlockOutputs(type, subBlocks, triggerMode)
const newState = {
blocks: {

View File

@@ -27,7 +27,9 @@
"@sim/db": ["../../packages/db"],
"@sim/db/*": ["../../packages/db/*"],
"@/executor": ["./executor"],
"@/executor/*": ["./executor/*"]
"@/executor/*": ["./executor/*"],
"@/executor/pause-resume": ["./executor/pause-resume"],
"@/executor/pause-resume/*": ["./executor/pause-resume/*"]
},
"allowJs": true,
"noEmit": true,

View File

@@ -0,0 +1,37 @@
CREATE TABLE "paused_executions" (
"id" text PRIMARY KEY NOT NULL,
"workflow_id" text NOT NULL,
"execution_id" text NOT NULL,
"execution_snapshot" jsonb NOT NULL,
"pause_points" jsonb NOT NULL,
"total_pause_count" integer NOT NULL,
"resumed_count" integer DEFAULT 0 NOT NULL,
"status" text DEFAULT 'paused' NOT NULL,
"metadata" jsonb DEFAULT '{}'::jsonb NOT NULL,
"paused_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL,
"expires_at" timestamp
);
--> statement-breakpoint
CREATE TABLE "resume_queue" (
"id" text PRIMARY KEY NOT NULL,
"paused_execution_id" text NOT NULL,
"parent_execution_id" text NOT NULL,
"new_execution_id" text NOT NULL,
"context_id" text NOT NULL,
"resume_input" jsonb,
"status" text DEFAULT 'pending' NOT NULL,
"queued_at" timestamp DEFAULT now() NOT NULL,
"claimed_at" timestamp,
"completed_at" timestamp,
"failure_reason" text
);
--> statement-breakpoint
ALTER TABLE "custom_tools" ALTER COLUMN "workspace_id" DROP NOT NULL;--> statement-breakpoint
ALTER TABLE "paused_executions" ADD CONSTRAINT "paused_executions_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "resume_queue" ADD CONSTRAINT "resume_queue_paused_execution_id_paused_executions_id_fk" FOREIGN KEY ("paused_execution_id") REFERENCES "public"."paused_executions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "paused_executions_workflow_id_idx" ON "paused_executions" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "paused_executions_status_idx" ON "paused_executions" USING btree ("status");--> statement-breakpoint
CREATE UNIQUE INDEX "paused_executions_execution_id_unique" ON "paused_executions" USING btree ("execution_id");--> statement-breakpoint
CREATE INDEX "resume_queue_parent_status_idx" ON "resume_queue" USING btree ("parent_execution_id","status","queued_at");--> statement-breakpoint
CREATE INDEX "resume_queue_new_execution_idx" ON "resume_queue" USING btree ("new_execution_id");

File diff suppressed because it is too large Load Diff

View File

@@ -736,6 +736,13 @@
"when": 1761860659858,
"tag": "0105_glamorous_wrecking_crew",
"breakpoints": true
},
{
"idx": 106,
"version": "7",
"when": 1762371130884,
"tag": "0106_bitter_captain_midlands",
"breakpoints": true
}
]
}

View File

@@ -317,6 +317,58 @@ export const workflowExecutionLogs = pgTable(
})
)
/**
 * Workflow executions suspended mid-run (human-in-the-loop pause/resume).
 *
 * Each row captures everything needed to later resume one execution:
 * a serialized snapshot plus the set of pause points. `execution_id` is
 * unique, so there is at most one paused row per execution. Rows are
 * removed automatically when the owning workflow is deleted (FK cascade).
 */
export const pausedExecutions = pgTable(
  'paused_executions',
  {
    id: text('id').primaryKey(),
    workflowId: text('workflow_id')
      .notNull()
      .references(() => workflow.id, { onDelete: 'cascade' }),
    executionId: text('execution_id').notNull(),
    // Serialized executor state — presumably replayed on resume; see resume flow.
    executionSnapshot: jsonb('execution_snapshot').notNull(),
    // Locations where the run stopped (e.g. parallel branches can pause independently).
    pausePoints: jsonb('pause_points').notNull(),
    totalPauseCount: integer('total_pause_count').notNull(),
    // Incremented as individual pause points are resumed; starts at zero.
    resumedCount: integer('resumed_count').default(0).notNull(),
    status: text('status').default('paused').notNull(),
    metadata: jsonb('metadata').default(sql`'{}'::jsonb`).notNull(),
    pausedAt: timestamp('paused_at').defaultNow().notNull(),
    updatedAt: timestamp('updated_at').defaultNow().notNull(),
    // Optional expiry; NULL means the paused execution never expires.
    expiresAt: timestamp('expires_at'),
  },
  (t) => ({
    workflowIdx: index('paused_executions_workflow_id_idx').on(t.workflowId),
    statusIdx: index('paused_executions_status_idx').on(t.status),
    // Enforces the one-paused-row-per-execution invariant.
    executionUnique: uniqueIndex('paused_executions_execution_id_unique').on(t.executionId),
  })
)
/**
 * Queue of resume requests targeting paused executions.
 *
 * A request references the paused row it resumes (FK cascade on delete),
 * the original (`parent_execution_id`) and spawned (`new_execution_id`)
 * execution ids, and an optional JSON payload supplied at resume time.
 * Workers move rows through the lifecycle via `claimed_at`,
 * `completed_at`, and `failure_reason`.
 */
export const resumeQueue = pgTable(
  'resume_queue',
  {
    id: text('id').primaryKey(),
    pausedExecutionId: text('paused_execution_id')
      .notNull()
      .references(() => pausedExecutions.id, { onDelete: 'cascade' }),
    parentExecutionId: text('parent_execution_id').notNull(),
    newExecutionId: text('new_execution_id').notNull(),
    contextId: text('context_id').notNull(),
    // Nullable: a resume may carry no user input.
    resumeInput: jsonb('resume_input'),
    status: text('status').default('pending').notNull(),
    queuedAt: timestamp('queued_at').defaultNow().notNull(),
    claimedAt: timestamp('claimed_at'),
    completedAt: timestamp('completed_at'),
    failureReason: text('failure_reason'),
  },
  (t) => ({
    // Matches the poll pattern: rows for one parent, filtered by status, FIFO by queue time.
    parentStatusIdx: index('resume_queue_parent_status_idx').on(
      t.parentExecutionId,
      t.status,
      t.queuedAt
    ),
    newExecutionIdx: index('resume_queue_new_execution_idx').on(t.newExecutionId),
  })
)
export const environment = pgTable('environment', {
id: text('id').primaryKey(), // Use the user id as the key
userId: text('user_id')