feat(hitl): add human in the loop block (#1832)

* fix(billing): should allow restoring subscription (#1728)

* fix(already-cancelled-sub): UI should allow restoring subscription

* restore functionality fixed

* fix

* Add pause resume block

* Add db schema

* Initial test passes

* Tests pass

* Execution pauses

* Snapshot serializer

* Ui checkpoint

* Works 1

* Pause resume simple v1

* Hitl block works in parallel branches without timing overlap

* Pending status to logs

* Pause resume ui link

* Big context consolidation

* HITL works in loops

* Fix parallels

* Reference blocks properly

* Fix tag dropdown and start block resolution

* Filter console logs for hitl block

* Fix notifs

* Fix logs page

* Fix logs page again

* Fix

* Checkpoint

* Cleanup v1

* Refactor v2

* Refactor v3

* Refactor v4

* Refactor v5

* Resume page

* Fix variables in loops

* Fix var res bugs

* Ui changes

* Approval block

* Hitl works e2e v1

* Fix tests

* Row level lock

---------

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
This commit is contained in:
Siddharth Ganesan
2025-11-06 15:59:28 -08:00
committed by GitHub
parent f9ce65eddf
commit 742d59f54d
90 changed files with 13498 additions and 1128 deletions

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema' import { pausedExecutions, permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm' import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
@@ -68,6 +68,9 @@ export async function GET(request: NextRequest) {
workflowWorkspaceId: workflow.workspaceId, workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt, workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt, workflowUpdatedAt: workflow.updatedAt,
pausedStatus: pausedExecutions.status,
pausedTotalPauseCount: pausedExecutions.totalPauseCount,
pausedResumedCount: pausedExecutions.resumedCount,
} }
: { : {
// Basic mode - exclude large fields for better performance // Basic mode - exclude large fields for better performance
@@ -92,11 +95,18 @@ export async function GET(request: NextRequest) {
workflowWorkspaceId: workflow.workspaceId, workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt, workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt, workflowUpdatedAt: workflow.updatedAt,
pausedStatus: pausedExecutions.status,
pausedTotalPauseCount: pausedExecutions.totalPauseCount,
pausedResumedCount: pausedExecutions.resumedCount,
} }
const baseQuery = db const baseQuery = db
.select(selectColumns) .select(selectColumns)
.from(workflowExecutionLogs) .from(workflowExecutionLogs)
.leftJoin(
pausedExecutions,
eq(pausedExecutions.executionId, workflowExecutionLogs.executionId)
)
.innerJoin( .innerJoin(
workflow, workflow,
and( and(
@@ -186,6 +196,10 @@ export async function GET(request: NextRequest) {
const countQuery = db const countQuery = db
.select({ count: sql<number>`count(*)` }) .select({ count: sql<number>`count(*)` })
.from(workflowExecutionLogs) .from(workflowExecutionLogs)
.leftJoin(
pausedExecutions,
eq(pausedExecutions.executionId, workflowExecutionLogs.executionId)
)
.innerJoin( .innerJoin(
workflow, workflow,
and( and(
@@ -340,13 +354,18 @@ export async function GET(request: NextRequest) {
return { return {
id: log.id, id: log.id,
workflowId: log.workflowId, workflowId: log.workflowId,
executionId: params.details === 'full' ? log.executionId : undefined, executionId: log.executionId,
level: log.level, level: log.level,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null, duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger, trigger: log.trigger,
createdAt: log.startedAt.toISOString(), createdAt: log.startedAt.toISOString(),
files: params.details === 'full' ? log.files || undefined : undefined, files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary, workflow: workflowSummary,
pauseSummary: {
status: log.pausedStatus ?? null,
total: log.pausedTotalPauseCount ?? 0,
resumed: log.pausedResumedCount ?? 0,
},
executionData: executionData:
params.details === 'full' params.details === 'full'
? { ? {
@@ -361,6 +380,10 @@ export async function GET(request: NextRequest) {
params.details === 'full' params.details === 'full'
? (costSummary as any) ? (costSummary as any)
: { total: (costSummary as any)?.total || 0 }, : { total: (costSummary as any)?.total || 0 },
hasPendingPause:
(Number(log.pausedTotalPauseCount ?? 0) > 0 &&
Number(log.pausedResumedCount ?? 0) < Number(log.pausedTotalPauseCount ?? 0)) ||
(log.pausedStatus && log.pausedStatus !== 'fully_resumed'),
} }
}) })
return NextResponse.json( return NextResponse.json(

View File

@@ -0,0 +1,116 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
const logger = createLogger('WorkflowResumeAPI')
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

/**
 * POST /api/workflows/[workflowId]/executions/[executionId]/resume/[contextId]
 *
 * Resumes a paused workflow execution at the given pause context. If other
 * resumes are already in flight for this execution, the request is queued;
 * otherwise a resume execution is started in the background (fire-and-forget)
 * and failures are surfaced via logs.
 */
export async function POST(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ workflowId: string; executionId: string; contextId: string }>
  }
) {
  const { workflowId, executionId, contextId } = await params

  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }

  const workflow = access.workflow!

  // The request body is optional; a missing or invalid JSON body means "no input".
  let payload: unknown = {}
  try {
    payload = await request.json()
  } catch {
    payload = {}
  }

  // Accept either `{ input: {...} }` or a bare object as the resume input.
  const body = payload as Record<string, unknown> | null | undefined
  const resumeInput = body?.input ?? body ?? {}
  const userId = workflow.userId ?? ''

  try {
    const enqueueResult = await PauseResumeManager.enqueueOrStartResume({
      executionId,
      contextId,
      resumeInput,
      userId,
    })

    if (enqueueResult.status === 'queued') {
      return NextResponse.json({
        status: 'queued',
        executionId: enqueueResult.resumeExecutionId,
        queuePosition: enqueueResult.queuePosition,
        message: 'Resume queued. It will run after current resumes finish.',
      })
    }

    // Fire-and-forget: the resume runs in the background; the HTTP response
    // does not wait for it, so failures are only visible in the logs.
    PauseResumeManager.startResumeExecution({
      resumeEntryId: enqueueResult.resumeEntryId,
      resumeExecutionId: enqueueResult.resumeExecutionId,
      pausedExecution: enqueueResult.pausedExecution,
      contextId: enqueueResult.contextId,
      resumeInput: enqueueResult.resumeInput,
      userId: enqueueResult.userId,
    }).catch((error) => {
      logger.error('Failed to start resume execution', {
        workflowId,
        parentExecutionId: executionId,
        resumeExecutionId: enqueueResult.resumeExecutionId,
        error,
      })
    })

    return NextResponse.json({
      status: 'started',
      executionId: enqueueResult.resumeExecutionId,
      message: 'Resume execution started.',
    })
  } catch (error: unknown) {
    logger.error('Resume request failed', {
      workflowId,
      executionId,
      contextId,
      error,
    })
    // Narrow the unknown error instead of relying on `any`.
    const message = error instanceof Error ? error.message : ''
    return NextResponse.json(
      { error: message || 'Failed to queue resume request' },
      { status: 400 }
    )
  }
}
export async function GET(
request: NextRequest,
{
params,
}: {
params: Promise<{ workflowId: string; executionId: string; contextId: string }>
}
) {
const { workflowId, executionId, contextId } = await params
const access = await validateWorkflowAccess(request, workflowId, false)
if (access.error) {
return NextResponse.json({ error: access.error.message }, { status: access.error.status })
}
const detail = await PauseResumeManager.getPauseContextDetail({
workflowId,
executionId,
contextId,
})
if (!detail) {
return NextResponse.json({ error: 'Pause context not found' }, { status: 404 })
}
return NextResponse.json(detail)
}

View File

@@ -0,0 +1,48 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
const logger = createLogger('WorkflowResumeExecutionAPI')
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

/**
 * GET /api/workflows/[workflowId]/executions/[executionId]
 *
 * Returns the paused-execution detail for a workflow execution.
 * Responds 404 when no paused execution exists and 500 on lookup failures.
 */
export async function GET(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ workflowId: string; executionId: string }>
  }
) {
  const { workflowId, executionId } = await params

  // Reject callers without access to this workflow before touching any state.
  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    const { message, status } = access.error
    return NextResponse.json({ error: message }, { status })
  }

  try {
    const detail = await PauseResumeManager.getPausedExecutionDetail({ workflowId, executionId })
    return detail
      ? NextResponse.json(detail)
      : NextResponse.json({ error: 'Paused execution not found' }, { status: 404 })
  } catch (error: any) {
    logger.error('Failed to load paused execution detail', { workflowId, executionId, error })
    return NextResponse.json(
      { error: error?.message || 'Failed to load paused execution detail' },
      { status: 500 }
    )
  }
}

View File

@@ -12,6 +12,7 @@ import {
} from '@/lib/workflows/db-helpers' } from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events' import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware' import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot' import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { StreamingExecution } from '@/executor/types' import type { StreamingExecution } from '@/executor/types'
@@ -135,6 +136,24 @@ export async function executeWorkflow(
loggingSession, loggingSession,
}) })
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
if (streamConfig?.skipLoggingComplete) { if (streamConfig?.skipLoggingComplete) {
return { return {
...result, ...result,
@@ -605,6 +624,24 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
loggingSession, loggingSession,
}) })
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
if (result.error === 'Workflow execution was cancelled') { if (result.error === 'Workflow execution was cancelled') {
logger.info(`[${requestId}] Workflow execution was cancelled`) logger.info(`[${requestId}] Workflow execution was cancelled`)
sendEvent({ sendEvent({

View File

@@ -0,0 +1,34 @@
import { type NextRequest, NextResponse } from 'next/server'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

/**
 * GET /api/workflows/[id]/executions/[executionId]/paused
 *
 * Returns detail about the paused execution for this workflow, or 404 when
 * none exists.
 *
 * Note: every sibling route in this feature receives `params` as a Promise
 * (Next.js 15 async-params convention); this handler now matches them.
 * Awaiting a plain object is also valid, so this stays backward compatible.
 */
export async function GET(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ id: string; executionId: string }>
  }
) {
  const { id: workflowId, executionId } = await params

  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }

  const detail = await PauseResumeManager.getPausedExecutionDetail({
    workflowId,
    executionId,
  })

  if (!detail) {
    return NextResponse.json({ error: 'Paused execution not found' }, { status: 404 })
  }

  return NextResponse.json(detail)
}

View File

@@ -0,0 +1,31 @@
import { type NextRequest, NextResponse } from 'next/server'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

/**
 * GET /api/workflows/[id]/executions/paused
 *
 * Lists paused executions for a workflow, optionally filtered by `?status=`.
 *
 * Note: sibling routes in this feature receive `params` as a Promise
 * (Next.js 15 async-params convention); this handler now matches them.
 * Awaiting a plain object is also valid, so this stays backward compatible.
 */
export async function GET(
  request: NextRequest,
  {
    params,
  }: {
    params: Promise<{ id: string }>
  }
) {
  const { id: workflowId } = await params

  const access = await validateWorkflowAccess(request, workflowId, false)
  if (access.error) {
    return NextResponse.json({ error: access.error.message }, { status: access.error.status })
  }

  // Optional filter, e.g. ?status=paused; absent/empty means "all".
  const statusFilter = request.nextUrl.searchParams.get('status') || undefined

  const pausedExecutions = await PauseResumeManager.listPausedExecutions({
    workflowId,
    status: statusFilter,
  })

  return NextResponse.json({ pausedExecutions })
}

View File

@@ -0,0 +1,15 @@
import { redirect } from 'next/navigation'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

// Route params for /resume/[workflowId]/[executionId]/[contextId].
interface PageParams {
  workflowId: string
  executionId: string
  contextId: string
}

/**
 * Deep-link route: redirects /resume/[workflowId]/[executionId]/[contextId]
 * to the resume console page, carrying the context id as a query parameter.
 */
export default async function ResumePage({ params }: { params: Promise<PageParams> }) {
  const { workflowId, executionId, contextId } = await params
  // Encode each segment: Next delivers params already URL-decoded, so a raw
  // contextId containing '&' or '#' would otherwise corrupt the query string.
  redirect(
    `/resume/${encodeURIComponent(workflowId)}/${encodeURIComponent(executionId)}?contextId=${encodeURIComponent(contextId)}`
  )
}

View File

@@ -0,0 +1,40 @@
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import ResumeExecutionPage from './resume-page-client'
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

// Route params for /resume/[workflowId]/[executionId].
interface PageParams {
  workflowId: string
  executionId: string
}

/**
 * Server wrapper for the resume console. Loads the paused-execution detail on
 * the server and hands a JSON-safe copy to the client component, together
 * with an optional initial context id taken from the query string.
 */
export default async function ResumeExecutionPageWrapper({
  params,
  searchParams,
}: {
  params: Promise<PageParams>
  searchParams: Promise<Record<string, string | string[] | undefined>>
}) {
  const [resolvedParams, resolvedSearchParams] = await Promise.all([params, searchParams])
  const { workflowId, executionId } = resolvedParams

  // ?contextId= may appear multiple times; use the first occurrence.
  const rawContextId = resolvedSearchParams?.contextId
  const initialContextId = Array.isArray(rawContextId) ? rawContextId[0] : rawContextId

  const detail = await PauseResumeManager.getPausedExecutionDetail({
    workflowId,
    executionId,
  })

  // JSON round-trip strips non-serializable values before the RSC boundary.
  const serializableDetail = detail ? JSON.parse(JSON.stringify(detail)) : null

  return (
    <ResumeExecutionPage
      params={resolvedParams}
      initialExecutionDetail={serializableDetail}
      initialContextId={initialContextId}
    />
  )
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,6 @@
import { useEffect, useMemo, useRef, useState } from 'react' import { useEffect, useMemo, useRef, useState } from 'react'
import { Info, Loader2 } from 'lucide-react' import { ArrowUpRight, Info, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useRouter } from 'next/navigation' import { useRouter } from 'next/navigation'
import { cn } from '@/lib/utils' import { cn } from '@/lib/utils'
import LineChart, { import LineChart, {
@@ -27,6 +28,7 @@ export interface ExecutionLogItem {
} | null } | null
workflowName?: string workflowName?: string
workflowColor?: string workflowColor?: string
hasPendingPause?: boolean
} }
export interface WorkflowDetailsData { export interface WorkflowDetailsData {
@@ -263,7 +265,7 @@ export function WorkflowDetails({
<div className='w-full overflow-x-auto'> <div className='w-full overflow-x-auto'>
<div> <div>
<div className='border-border border-b'> <div className='border-border border-b'>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] gap-2 px-2 pb-3 md:gap-3 lg:min-w-0 lg:gap-4'> <div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px_40px] gap-2 px-2 pb-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div className='font-[460] font-sans text-[13px] text-muted-foreground leading-normal'> <div className='font-[460] font-sans text-[13px] text-muted-foreground leading-normal'>
Time Time
</div> </div>
@@ -285,6 +287,9 @@ export function WorkflowDetails({
<div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'> <div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Duration Duration
</div> </div>
<div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Resume
</div>
</div> </div>
</div> </div>
</div> </div>
@@ -317,6 +322,12 @@ export function WorkflowDetails({
const outputsStr = log.outputs ? JSON.stringify(log.outputs) : '—' const outputsStr = log.outputs ? JSON.stringify(log.outputs) : '—'
const errorStr = log.errorMessage || '' const errorStr = log.errorMessage || ''
const isExpanded = expandedRowId === log.id const isExpanded = expandedRowId === log.id
const baseLevel = (log.level || 'info').toLowerCase()
const isPending = log.hasPendingPause === true
const isError = baseLevel === 'error'
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return ( return (
<div <div
@@ -329,7 +340,7 @@ export function WorkflowDetails({
setExpandedRowId((prev) => (prev === log.id ? null : log.id)) setExpandedRowId((prev) => (prev === log.id ? null : log.id))
} }
> >
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] items-center gap-2 px-2 py-3 md:gap-3 lg:min-w-0 lg:gap-4'> <div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px_40px] items-center gap-2 px-2 py-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div> <div>
<div className='text-[13px]'> <div className='text-[13px]'>
<span className='font-sm text-muted-foreground'> <span className='font-sm text-muted-foreground'>
@@ -348,12 +359,14 @@ export function WorkflowDetails({
<div <div
className={cn( className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-[400] text-xs transition-all duration-200 lg:px-[8px]', 'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-[400] text-xs transition-all duration-200 lg:px-[8px]',
log.level === 'error' isError
? 'bg-red-500 text-white' ? 'bg-red-500 text-white'
: isPending
? 'bg-amber-300 text-amber-900 dark:bg-amber-500/90 dark:text-black'
: 'bg-secondary text-card-foreground' : 'bg-secondary text-card-foreground'
)} )}
> >
{log.level} {statusLabel}
</div> </div>
</div> </div>
@@ -423,6 +436,20 @@ export function WorkflowDetails({
{typeof log.duration === 'number' ? `${log.duration}ms` : '—'} {typeof log.duration === 'number' ? `${log.duration}ms` : '—'}
</div> </div>
</div> </div>
<div className='flex justify-end'>
{isPending && log.executionId ? (
<Link
href={`/resume/${expandedWorkflowId}/${log.executionId}`}
className='inline-flex h-7 w-7 items-center justify-center rounded-md border border-primary/60 border-dashed text-primary hover:bg-primary/10'
aria-label='Open resume console'
>
<ArrowUpRight className='h-4 w-4' />
</Link>
) : (
<span className='h-7 w-7' />
)}
</div>
</div> </div>
{isExpanded && ( {isExpanded && (
<div className='px-2 pt-0 pb-4'> <div className='px-2 pt-0 pb-4'>

View File

@@ -432,13 +432,22 @@ export function Sidebar({
</div> </div>
)} )}
{/* Level */} {/* Status */}
<div> <div>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Level</h3> <h3 className='mb-1 font-medium text-muted-foreground text-xs'>Status</h3>
{(() => {
const baseLevel = (log.level || 'info').toLowerCase()
const isPending = log.duration == null
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return (
<div className='group relative text-sm capitalize'> <div className='group relative text-sm capitalize'>
<CopyButton text={log.level} /> <CopyButton text={statusLabel} />
{log.level} {statusLabel}
</div> </div>
)
})()}
</div> </div>
{/* Trigger */} {/* Trigger */}

View File

@@ -1,7 +1,8 @@
'use client' 'use client'
import { useCallback, useEffect, useRef, useState } from 'react' import { useCallback, useEffect, useRef, useState } from 'react'
import { AlertCircle, Info, Loader2 } from 'lucide-react' import { AlertCircle, ArrowUpRight, Info, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useParams } from 'next/navigation' import { useParams } from 'next/navigation'
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser' import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
@@ -775,6 +776,13 @@ export default function Logs() {
{logs.map((log) => { {logs.map((log) => {
const formattedDate = formatDate(log.createdAt) const formattedDate = formatDate(log.createdAt)
const isSelected = selectedLog?.id === log.id const isSelected = selectedLog?.id === log.id
const baseLevel = (log.level || 'info').toLowerCase()
const isError = baseLevel === 'error'
// If it's an error, don't treat it as pending even if hasPendingPause is true
const isPending = !isError && log.hasPendingPause === true
const statusLabel = isPending
? 'Pending'
: `${baseLevel.charAt(0).toUpperCase()}${baseLevel.slice(1)}`
return ( return (
<div <div
@@ -785,7 +793,7 @@ export default function Logs() {
}`} }`}
onClick={() => handleLogClick(log)} onClick={() => handleLogClick(log)}
> >
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'> <div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px_40px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px_40px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px_40px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px_40px]'>
{/* Time */} {/* Time */}
<div> <div>
<div className='text-[13px]'> <div className='text-[13px]'>
@@ -806,12 +814,14 @@ export default function Logs() {
<div <div
className={cn( className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-medium text-xs transition-all duration-200 lg:px-[8px]', 'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-medium text-xs transition-all duration-200 lg:px-[8px]',
log.level === 'error' isError
? 'bg-red-500 text-white' ? 'bg-red-500 text-white'
: isPending
? 'bg-amber-300 text-amber-900 dark:bg-amber-500/90 dark:text-black'
: 'bg-secondary text-card-foreground' : 'bg-secondary text-card-foreground'
)} )}
> >
{log.level} {statusLabel}
</div> </div>
</div> </div>
@@ -860,6 +870,23 @@ export default function Logs() {
{log.duration || '—'} {log.duration || '—'}
</div> </div>
</div> </div>
{/* Resume Link */}
<div className='flex justify-end'>
{isPending &&
log.executionId &&
(log.workflow?.id || log.workflowId) ? (
<Link
href={`/resume/${log.workflow?.id || log.workflowId}/${log.executionId}`}
className='inline-flex h-7 w-7 items-center justify-center rounded-md border border-primary/60 border-dashed text-primary hover:bg-primary/10'
aria-label='Open resume console'
>
<ArrowUpRight className='h-4 w-4' />
</Link>
) : (
<span className='h-7 w-7' />
)}
</div>
</div> </div>
</div> </div>
) )

View File

@@ -95,6 +95,7 @@ export interface ExecutionLog {
} | null } | null
workflowName?: string workflowName?: string
workflowColor?: string workflowColor?: string
hasPendingPause?: boolean
} }
/** /**
@@ -133,6 +134,7 @@ export function mapToExecutionLog(log: any): ExecutionLog {
: null, : null,
workflowName: log.workflowName || log.workflow?.name, workflowName: log.workflowName || log.workflow?.name,
workflowColor: log.workflowColor || log.workflow?.color, workflowColor: log.workflowColor || log.workflow?.color,
hasPendingPause: log.hasPendingPause === true,
} }
} }
@@ -164,6 +166,7 @@ export function mapToExecutionLogAlt(log: any): ExecutionLog {
: null, : null,
workflowName: log.workflow?.name, workflowName: log.workflow?.name,
workflowColor: log.workflow?.color, workflowColor: log.workflow?.color,
hasPendingPause: log.hasPendingPause === true,
} }
} }

View File

@@ -17,6 +17,7 @@ import {
import { decryptSecret } from '@/lib/utils' import { decryptSecret } from '@/lib/utils'
import { blockExistsInDeployment, loadDeployedWorkflowState } from '@/lib/workflows/db-helpers' import { blockExistsInDeployment, loadDeployedWorkflowState } from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils' import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot' import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import { Serializer } from '@/serializer' import { Serializer } from '@/serializer'
@@ -452,6 +453,24 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
loggingSession, loggingSession,
}) })
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Workflow execution completed: ${payload.workflowId}`, { logger.info(`[${requestId}] Workflow execution completed: ${payload.workflowId}`, {
success: executionResult.success, success: executionResult.success,
executionTime: executionResult.metadata?.duration, executionTime: executionResult.metadata?.duration,

View File

@@ -17,6 +17,7 @@ import {
loadWorkflowFromNormalizedTables, loadWorkflowFromNormalizedTables,
} from '@/lib/workflows/db-helpers' } from '@/lib/workflows/db-helpers'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkflowById } from '@/lib/workflows/utils' import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot' import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult } from '@/executor/types' import type { ExecutionResult } from '@/executor/types'
@@ -250,6 +251,24 @@ async function executeWebhookJobInternal(
loggingSession, loggingSession,
}) })
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Airtable webhook execution completed`, { logger.info(`[${requestId}] Airtable webhook execution completed`, {
success: executionResult.success, success: executionResult.success,
workflowId: payload.workflowId, workflowId: payload.workflowId,
@@ -445,6 +464,24 @@ async function executeWebhookJobInternal(
loggingSession, loggingSession,
}) })
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId: payload.workflowId,
executionId,
pausePoints: executionResult.pausePoints || [],
snapshotSeed: executionResult.snapshotSeed,
executorUserId: executionResult.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Webhook execution completed`, { logger.info(`[${requestId}] Webhook execution completed`, {
success: executionResult.success, success: executionResult.success,
workflowId: payload.workflowId, workflowId: payload.workflowId,

View File

@@ -7,6 +7,7 @@ import { checkServerSideUsageLimits } from '@/lib/billing'
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session' import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/pause-resume-manager'
import { getWorkflowById } from '@/lib/workflows/utils' import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot' import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
@@ -119,6 +120,24 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
loggingSession, loggingSession,
}) })
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
})
} else {
await PauseResumeManager.persistPauseResult({
workflowId,
executionId,
pausePoints: result.pausePoints || [],
snapshotSeed: result.snapshotSeed,
executorUserId: result.metadata?.userId,
})
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
}
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, { logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: result.success, success: result.success,
executionTime: result.metadata?.duration, executionTime: result.metadata?.duration,

View File

@@ -0,0 +1,169 @@
import type { SVGProps } from 'react'
import { createElement } from 'react'
import { UserCheck } from 'lucide-react'
import type { BlockConfig } from '@/blocks/types'
import type { ResponseBlockOutput } from '@/tools/response/types'
const ApprovalIcon = (props: SVGProps<SVGSVGElement>) => createElement(UserCheck, props)
/**
 * Human-in-the-loop "Approval" block (registered under type 'approval').
 *
 * Pauses workflow execution at this point; execution resumes when an approver
 * submits the fields defined in `inputFormat`. Only the human-approval mode is
 * active — the API-response mode's subBlocks are kept below as commented-out
 * code for a possible future re-enable, and their inputs remain declared so
 * existing serialized workflows stay valid.
 */
export const PauseResumeBlock: BlockConfig<ResponseBlockOutput> = {
  type: 'approval',
  name: 'Approval',
  description: 'Pause workflow execution and send structured API response',
  longDescription:
    'Combines response and start functionality. Sends structured responses and allows workflow to resume from this point.',
  category: 'blocks',
  bgColor: '#10B981',
  icon: ApprovalIcon,
  subBlocks: [
    // Operation dropdown hidden - block defaults to human approval mode
    // {
    //   id: 'operation',
    //   title: 'Operation',
    //   type: 'dropdown',
    //   layout: 'full',
    //   options: [
    //     { label: 'Human Approval', id: 'human' },
    //     { label: 'API Response', id: 'api' },
    //   ],
    //   value: () => 'human',
    //   description: 'Choose whether to wait for human approval or send an API response',
    // },
    // Structured payload surfaced to the approver while the workflow is paused.
    {
      id: 'builderData',
      title: 'Paused Output',
      type: 'response-format',
      layout: 'full',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description:
        'Define the structure of your response data. Use <variable.name> in field names to reference workflow variables.',
    },
    // Tools used to notify approvers when the pause is hit (e.g. Slack, Email).
    {
      id: 'notification',
      title: 'Notification',
      type: 'tool-input',
      layout: 'full',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description: 'Configure notification tools to alert approvers (e.g., Slack, Email)',
      // NOTE(review): shared [] default — assumes the subBlock framework copies
      // defaults per block instance rather than aliasing this array; verify.
      defaultValue: [],
    },
    // API mode subBlocks commented out - only human approval mode is supported
    // {
    //   id: 'dataMode',
    //   title: 'Response Data Mode',
    //   type: 'dropdown',
    //   layout: 'full',
    //   options: [
    //     { label: 'Builder', id: 'structured' },
    //     { label: 'Editor', id: 'json' },
    //   ],
    //   value: () => 'structured',
    //   condition: { field: 'operation', value: 'api' },
    //   description: 'Choose how to define your response data structure',
    // },
    // Fields the approver fills in; their values become this block's outputs on resume.
    {
      id: 'inputFormat',
      title: 'Resume Input',
      type: 'input-format',
      layout: 'full',
      // condition: { field: 'operation', value: 'human' }, // Always shown since we only support human mode
      description: 'Define the fields the approver can fill in when resuming',
    },
    // {
    //   id: 'data',
    //   title: 'Response Data',
    //   type: 'code',
    //   layout: 'full',
    //   placeholder: '{\n "message": "Hello world",\n "userId": "<variable.userId>"\n}',
    //   language: 'json',
    //   condition: {
    //     field: 'operation',
    //     value: 'api',
    //     and: { field: 'dataMode', value: 'json' },
    //   },
    //   description:
    //     'Data that will be sent as the response body on API calls. Use <variable.name> to reference workflow variables.',
    //   wandConfig: {
    //     enabled: true,
    //     maintainHistory: true,
    //     prompt: `You are an expert JSON programmer.
    // Generate ONLY the raw JSON object based on the user's request.
    // The output MUST be a single, valid JSON object, starting with { and ending with }.
    //
    // Current response: {context}
    //
    // Do not include any explanations, markdown formatting, or other text outside the JSON object.
    //
    // You have access to the following variables you can use to generate the JSON body:
    // - 'params' (object): Contains input parameters derived from the JSON schema. Access these directly using the parameter name wrapped in angle brackets, e.g., '<paramName>'. Do NOT use 'params.paramName'.
    // - 'environmentVariables' (object): Contains environment variables. Reference these using the double curly brace syntax: '{{ENV_VAR_NAME}}'. Do NOT use 'environmentVariables.VAR_NAME' or env.
    //
    // Example:
    // {
    //   "name": "<block.agent.response.content>",
    //   "age": <block.function.output.age>,
    //   "success": true
    // }`,
    //     placeholder: 'Describe the API response structure you need...',
    //     generationType: 'json-object',
    //   },
    // },
    // {
    //   id: 'status',
    //   title: 'Status Code',
    //   type: 'short-input',
    //   layout: 'half',
    //   placeholder: '200',
    //   condition: { field: 'operation', value: 'api' },
    //   description: 'HTTP status code (default: 200)',
    // },
    // {
    //   id: 'headers',
    //   title: 'Response Headers',
    //   type: 'table',
    //   layout: 'full',
    //   columns: ['Key', 'Value'],
    //   condition: { field: 'operation', value: 'api' },
    //   description: 'Additional HTTP headers to include in the response',
    // },
  ],
  // No external tools — pausing/resuming is handled by the executor itself.
  tools: { access: [] },
  // Inputs for the inactive API mode (operation, dataMode, data, status, headers)
  // are kept declared so previously saved workflows still validate.
  inputs: {
    operation: {
      type: 'string',
      description: 'Operation mode: human or api',
    },
    inputFormat: {
      type: 'json',
      description: 'Input fields for resume',
    },
    notification: {
      type: 'json',
      description: 'Notification tools configuration',
    },
    dataMode: {
      type: 'string',
      description: 'Response data definition mode',
    },
    builderData: {
      type: 'json',
      description: 'Structured response data',
    },
    data: {
      type: 'json',
      description: 'JSON response body',
    },
    status: {
      type: 'number',
      description: 'HTTP status code',
    },
    headers: {
      type: 'json',
      description: 'Response headers',
    },
  },
  // uiUrl is available as soon as the block pauses (usable even as a self-reference).
  outputs: {
    uiUrl: { type: 'string', description: 'Resume UI URL' },
    // apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
  },
}

View File

@@ -50,6 +50,7 @@ import { OneDriveBlock } from '@/blocks/blocks/onedrive'
import { OpenAIBlock } from '@/blocks/blocks/openai' import { OpenAIBlock } from '@/blocks/blocks/openai'
import { OutlookBlock } from '@/blocks/blocks/outlook' import { OutlookBlock } from '@/blocks/blocks/outlook'
import { ParallelBlock } from '@/blocks/blocks/parallel' import { ParallelBlock } from '@/blocks/blocks/parallel'
import { PauseResumeBlock } from '@/blocks/blocks/pause_resume'
import { PerplexityBlock } from '@/blocks/blocks/perplexity' import { PerplexityBlock } from '@/blocks/blocks/perplexity'
import { PineconeBlock } from '@/blocks/blocks/pinecone' import { PineconeBlock } from '@/blocks/blocks/pinecone'
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql' import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
@@ -95,6 +96,7 @@ export const registry: Record<string, BlockConfig> = {
agent: AgentBlock, agent: AgentBlock,
airtable: AirtableBlock, airtable: AirtableBlock,
api: ApiBlock, api: ApiBlock,
approval: PauseResumeBlock,
arxiv: ArxivBlock, arxiv: ArxivBlock,
browser_use: BrowserUseBlock, browser_use: BrowserUseBlock,
clay: ClayBlock, clay: ClayBlock,

View File

@@ -505,6 +505,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const outputPaths = generateOutputPaths(blockConfig.outputs || {}) const outputPaths = generateOutputPaths(blockConfig.outputs || {})
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
} }
} else if (sourceBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(sourceBlock.type, mergedSubBlocks)
// If it's a self-reference, only show uiUrl (available immediately)
const isSelfReference = activeSourceBlockId === blockId
if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
}
} else { } else {
// Check for tool-specific outputs first // Check for tool-specific outputs first
const operationValue = const operationValue =
@@ -698,7 +713,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (!accessibleBlock) continue if (!accessibleBlock) continue
// Skip the current block - blocks cannot reference their own outputs // Skip the current block - blocks cannot reference their own outputs
if (accessibleBlockId === blockId) continue // Exception: approval blocks can reference their own outputs
if (accessibleBlockId === blockId && accessibleBlock.type !== 'approval') continue
const blockConfig = getBlock(accessibleBlock.type) const blockConfig = getBlock(accessibleBlock.type)
@@ -817,6 +833,21 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const outputPaths = generateOutputPaths(blockConfig.outputs || {}) const outputPaths = generateOutputPaths(blockConfig.outputs || {})
blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`) blockTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
} }
} else if (accessibleBlock.type === 'approval') {
// For approval block, use dynamic outputs based on inputFormat
const dynamicOutputs = getBlockOutputPaths(accessibleBlock.type, mergedSubBlocks)
// If it's a self-reference, only show uiUrl (available immediately)
const isSelfReference = accessibleBlockId === blockId
if (dynamicOutputs.length > 0) {
const allTags = dynamicOutputs.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs || {})
const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.uiUrl')) : allTags
}
} else { } else {
// Check for tool-specific outputs first // Check for tool-specific outputs first
const operationValue = const operationValue =

View File

@@ -130,7 +130,7 @@ export const setupExecutorCoreMocks = () => {
LoopManager: vi.fn().mockImplementation(() => ({ LoopManager: vi.fn().mockImplementation(() => ({
processLoopIterations: vi.fn().mockResolvedValue(false), processLoopIterations: vi.fn().mockResolvedValue(false),
getLoopIndex: vi.fn().mockImplementation((loopId, blockId, context) => { getLoopIndex: vi.fn().mockImplementation((loopId, blockId, context) => {
return context.loopIterations?.get(loopId) || 0 return context.loopExecutions?.get(loopId)?.iteration || 0
}), }),
})), })),
})) }))
@@ -463,8 +463,7 @@ export const createWorkflowWithResponse = (): SerializedWorkflow => ({
*/ */
export interface MockContextOptions { export interface MockContextOptions {
workflowId?: string workflowId?: string
loopIterations?: Map<string, number> loopExecutions?: Map<string, any>
loopItems?: Map<string, any>
executedBlocks?: Set<string> executedBlocks?: Set<string>
activeExecutionPath?: Set<string> activeExecutionPath?: Set<string>
completedLoops?: Set<string> completedLoops?: Set<string>
@@ -485,13 +484,12 @@ export const createMockContext = (options: MockContextOptions = {}) => {
metadata: { startTime: new Date().toISOString(), duration: 0 }, metadata: { startTime: new Date().toISOString(), duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: options.loopIterations || new Map(), loopExecutions: options.loopExecutions || new Map(),
loopItems: options.loopItems || new Map(),
executedBlocks: options.executedBlocks || new Set<string>(), executedBlocks: options.executedBlocks || new Set<string>(),
activeExecutionPath: options.activeExecutionPath || new Set<string>(), activeExecutionPath: options.activeExecutionPath || new Set<string>(),
workflow, workflow,
completedLoops: options.completedLoops || new Set<string>(), completedLoops: options.completedLoops || new Set<string>(),
parallelExecutions: options.parallelExecutions, parallelExecutions: options.parallelExecutions || new Map(),
parallelBlockMapping: options.parallelBlockMapping, parallelBlockMapping: options.parallelBlockMapping,
currentVirtualBlockId: options.currentVirtualBlockId, currentVirtualBlockId: options.currentVirtualBlockId,
} }
@@ -509,7 +507,7 @@ export const createLoopManagerMock = (options?: {
getLoopIndex: getLoopIndex:
options?.getLoopIndexImpl || options?.getLoopIndexImpl ||
vi.fn().mockImplementation((loopId, blockId, context) => { vi.fn().mockImplementation((loopId, blockId, context) => {
return context.loopIterations.get(loopId) || 0 return context.loopExecutions?.get(loopId)?.iteration || 0
}), }),
})), })),
}) })

View File

@@ -1,72 +1,42 @@
/**
* Central constants and types for the executor
*
* Consolidates all magic strings, block types, edge handles, and type definitions
* used throughout the executor to eliminate duplication and improve type safety.
*/
/**
* Block types
*/
export enum BlockType { export enum BlockType {
// Control flow
PARALLEL = 'parallel', PARALLEL = 'parallel',
LOOP = 'loop', LOOP = 'loop',
ROUTER = 'router', ROUTER = 'router',
CONDITION = 'condition', CONDITION = 'condition',
// Triggers
START_TRIGGER = 'start_trigger', START_TRIGGER = 'start_trigger',
STARTER = 'starter', STARTER = 'starter',
TRIGGER = 'trigger', TRIGGER = 'trigger',
// Data processing
FUNCTION = 'function', FUNCTION = 'function',
AGENT = 'agent', AGENT = 'agent',
API = 'api', API = 'api',
EVALUATOR = 'evaluator', EVALUATOR = 'evaluator',
VARIABLES = 'variables', VARIABLES = 'variables',
// I/O
RESPONSE = 'response', RESPONSE = 'response',
APPROVAL = 'approval',
WORKFLOW = 'workflow', WORKFLOW = 'workflow',
WORKFLOW_INPUT = 'workflow_input', WORKFLOW_INPUT = 'workflow_input',
// Utilities
WAIT = 'wait', WAIT = 'wait',
// Infrastructure (virtual blocks)
SENTINEL_START = 'sentinel_start', SENTINEL_START = 'sentinel_start',
SENTINEL_END = 'sentinel_end', SENTINEL_END = 'sentinel_end',
} }
/**
* Trigger block types (blocks that can start a workflow)
*/
export const TRIGGER_BLOCK_TYPES = [ export const TRIGGER_BLOCK_TYPES = [
BlockType.START_TRIGGER, BlockType.START_TRIGGER,
BlockType.STARTER, BlockType.STARTER,
BlockType.TRIGGER, BlockType.TRIGGER,
] as const ] as const
/**
* Metadata-only block types (not executable, just configuration)
*/
export const METADATA_ONLY_BLOCK_TYPES = [BlockType.LOOP, BlockType.PARALLEL] as const export const METADATA_ONLY_BLOCK_TYPES = [BlockType.LOOP, BlockType.PARALLEL] as const
/**
* Loop types
*/
export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile' export type LoopType = 'for' | 'forEach' | 'while' | 'doWhile'
/**
* Sentinel types
*/
export type SentinelType = 'start' | 'end' export type SentinelType = 'start' | 'end'
/**
* Parallel types
*/
export type ParallelType = 'collection' | 'count' export type ParallelType = 'collection' | 'count'
export const EDGE = { export const EDGE = {
@@ -82,11 +52,7 @@ export const EDGE = {
DEFAULT: 'default', DEFAULT: 'default',
} as const } as const
/**
* Loop configuration
*/
export const LOOP = { export const LOOP = {
// Loop types
TYPE: { TYPE: {
FOR: 'for' as LoopType, FOR: 'for' as LoopType,
FOR_EACH: 'forEach' as LoopType, FOR_EACH: 'forEach' as LoopType,
@@ -94,39 +60,31 @@ export const LOOP = {
DO_WHILE: 'doWhile', DO_WHILE: 'doWhile',
}, },
// Sentinel node naming
SENTINEL: { SENTINEL: {
PREFIX: 'loop-', PREFIX: 'loop-',
START_SUFFIX: '-sentinel-start', START_SUFFIX: '-sentinel-start',
END_SUFFIX: '-sentinel-end', END_SUFFIX: '-sentinel-end',
START_TYPE: 'start' as SentinelType, START_TYPE: 'start' as SentinelType,
END_TYPE: 'end' as SentinelType, END_TYPE: 'end' as SentinelType,
START_NAME_PREFIX: 'Loop Start',
END_NAME_PREFIX: 'Loop End',
}, },
} as const } as const
/**
* Parallel configuration
*/
export const PARALLEL = { export const PARALLEL = {
// Parallel types
TYPE: { TYPE: {
COLLECTION: 'collection' as ParallelType, COLLECTION: 'collection' as ParallelType,
COUNT: 'count' as ParallelType, COUNT: 'count' as ParallelType,
}, },
// Branch notation
BRANCH: { BRANCH: {
PREFIX: '₍', PREFIX: '₍',
SUFFIX: '₎', SUFFIX: '₎',
}, },
// Default values
DEFAULT_COUNT: 1, DEFAULT_COUNT: 1,
} as const } as const
/**
* Reference syntax for variable resolution
*/
export const REFERENCE = { export const REFERENCE = {
START: '<', START: '<',
END: '>', END: '>',
@@ -146,9 +104,6 @@ export const SPECIAL_REFERENCE_PREFIXES = [
REFERENCE.PREFIX.VARIABLE, REFERENCE.PREFIX.VARIABLE,
] as const ] as const
/**
* Loop reference fields
*/
export const LOOP_REFERENCE = { export const LOOP_REFERENCE = {
ITERATION: 'iteration', ITERATION: 'iteration',
INDEX: 'index', INDEX: 'index',
@@ -156,9 +111,6 @@ export const LOOP_REFERENCE = {
INDEX_PATH: 'loop.index', INDEX_PATH: 'loop.index',
} as const } as const
/**
* Parallel reference fields
*/
export const PARALLEL_REFERENCE = { export const PARALLEL_REFERENCE = {
INDEX: 'index', INDEX: 'index',
CURRENT_ITEM: 'currentItem', CURRENT_ITEM: 'currentItem',
@@ -223,15 +175,44 @@ export const CONDITION = {
ELSE_TITLE: 'else', ELSE_TITLE: 'else',
} as const } as const
/**
 * Operation modes and URL path segments shared by the pause/resume (approval) flow.
 */
export const PAUSE_RESUME = {
  OPERATION: {
    HUMAN: 'human',
    API: 'api',
  },
  PATH: {
    API_RESUME: '/api/resume',
    UI_RESUME: '/resume',
  },
} as const

/**
 * Builds the REST endpoint that resumes one paused context of an execution.
 * When baseUrl is undefined the result is a relative path.
 */
export function buildResumeApiUrl(
  baseUrl: string | undefined,
  workflowId: string,
  executionId: string,
  contextId: string
): string {
  const origin = baseUrl ?? ''
  return [origin + PAUSE_RESUME.PATH.API_RESUME, workflowId, executionId, contextId].join('/')
}

/**
 * Builds the browser-facing resume page URL for a paused execution.
 * When baseUrl is undefined the result is a relative path.
 */
export function buildResumeUiUrl(
  baseUrl: string | undefined,
  workflowId: string,
  executionId: string
): string {
  const origin = baseUrl ?? ''
  return [origin + PAUSE_RESUME.PATH.UI_RESUME, workflowId, executionId].join('/')
}
export const PARSING = { export const PARSING = {
JSON_RADIX: 10, JSON_RADIX: 10,
PREVIEW_LENGTH: 200, PREVIEW_LENGTH: 200,
PREVIEW_SUFFIX: '...', PREVIEW_SUFFIX: '...',
} as const } as const
/** export type FieldType = 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files' | 'plain'
* Condition configuration
*/
export interface ConditionConfig { export interface ConditionConfig {
id: string id: string
label?: string label?: string

View File

@@ -0,0 +1,68 @@
import { describe, expect, it, vi } from 'vitest'
import { BlockType } from '@/executor/consts'
import { DAGBuilder } from '@/executor/dag/builder'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
// Stub the console logger so DAGBuilder's debug/info/warn/error output
// does not pollute test runs; each level is a no-op spy.
vi.mock('@/lib/logs/console/logger', () => ({
  createLogger: vi.fn().mockReturnValue({
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  }),
}))
/**
 * Builds a minimal, enabled SerializedBlock fixture at the origin.
 * `metadataId` carries the block type (e.g. BlockType.APPROVAL); the block's
 * display name is reused from its id.
 */
function createBlock(id: string, metadataId: string): SerializedBlock {
  const metadata = { id: metadataId, name: id }
  return {
    id,
    position: { x: 0, y: 0 },
    config: { tool: 'noop', params: {} },
    inputs: {},
    outputs: {},
    metadata,
    enabled: true,
  }
}
describe('DAGBuilder pause-resume transformation', () => {
  it('creates trigger nodes and rewires edges for pause blocks', () => {
    // Linear workflow: start -> pause (approval) -> finish.
    const workflow: SerializedWorkflow = {
      version: '1',
      blocks: [
        createBlock('start', BlockType.STARTER),
        createBlock('pause', BlockType.APPROVAL),
        createBlock('finish', BlockType.FUNCTION),
      ],
      connections: [
        { source: 'start', target: 'pause' },
        { source: 'pause', target: 'finish' },
      ],
      loops: {},
    }
    const builder = new DAGBuilder()
    const dag = builder.build(workflow)
    // The approval block stays in the DAG under its own id and is flagged
    // as a pause-response node.
    const pauseNode = dag.nodes.get('pause')
    expect(pauseNode).toBeDefined()
    expect(pauseNode?.metadata.isPauseResponse).toBe(true)
    // Original edges are preserved: start still feeds the pause node...
    const startNode = dag.nodes.get('start')!
    const startOutgoing = Array.from(startNode.outgoingEdges.values())
    expect(startOutgoing).toHaveLength(1)
    expect(startOutgoing[0].target).toBe('pause')
    // ...and the pause node still feeds the downstream block.
    const pauseOutgoing = Array.from(pauseNode!.outgoingEdges.values())
    expect(pauseOutgoing).toHaveLength(1)
    expect(pauseOutgoing[0].target).toBe('finish')
    // No separate synthetic trigger node is materialized for the pause block.
    const triggerNode = dag.nodes.get('pause__trigger')
    expect(triggerNode).toBeUndefined()
  })
})

View File

@@ -1,15 +1,15 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { EdgeConstructor } from '@/executor/dag/construction/edges'
import { LoopConstructor } from '@/executor/dag/construction/loops'
import { NodeConstructor } from '@/executor/dag/construction/nodes'
import { PathConstructor } from '@/executor/dag/construction/paths'
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
import type { import type {
SerializedBlock, SerializedBlock,
SerializedLoop, SerializedLoop,
SerializedParallel, SerializedParallel,
SerializedWorkflow, SerializedWorkflow,
} from '@/serializer/types' } from '@/serializer/types'
import { EdgeConstructor } from './construction/edges'
import { LoopConstructor } from './construction/loops'
import { NodeConstructor } from './construction/nodes'
import { PathConstructor } from './construction/paths'
import type { DAGEdge, NodeMetadata } from './types'
const logger = createLogger('DAGBuilder') const logger = createLogger('DAGBuilder')
@@ -33,7 +33,11 @@ export class DAGBuilder {
private nodeConstructor = new NodeConstructor() private nodeConstructor = new NodeConstructor()
private edgeConstructor = new EdgeConstructor() private edgeConstructor = new EdgeConstructor()
build(workflow: SerializedWorkflow, triggerBlockId?: string): DAG { build(
workflow: SerializedWorkflow,
triggerBlockId?: string,
savedIncomingEdges?: Record<string, string[]>
): DAG {
const dag: DAG = { const dag: DAG = {
nodes: new Map(), nodes: new Map(),
loopConfigs: new Map(), loopConfigs: new Map(),
@@ -43,26 +47,46 @@ export class DAGBuilder {
this.initializeConfigs(workflow, dag) this.initializeConfigs(workflow, dag)
const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId) const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId)
logger.debug('Reachable blocks from trigger:', {
triggerBlockId,
reachableCount: reachableBlocks.size,
totalBlocks: workflow.blocks.length,
})
this.loopConstructor.execute(dag, reachableBlocks) this.loopConstructor.execute(dag, reachableBlocks)
const { blocksInLoops, blocksInParallels } = this.nodeConstructor.execute( const { blocksInLoops, blocksInParallels, pauseTriggerMapping } = this.nodeConstructor.execute(
workflow, workflow,
dag, dag,
reachableBlocks reachableBlocks
) )
this.edgeConstructor.execute(workflow, dag, blocksInParallels, blocksInLoops, reachableBlocks) this.edgeConstructor.execute(
workflow,
dag,
blocksInParallels,
blocksInLoops,
reachableBlocks,
pauseTriggerMapping
)
if (savedIncomingEdges) {
logger.info('Restoring DAG incoming edges from snapshot', {
nodeCount: Object.keys(savedIncomingEdges).length,
})
for (const [nodeId, incomingEdgeArray] of Object.entries(savedIncomingEdges)) {
const node = dag.nodes.get(nodeId)
if (node) {
node.incomingEdges = new Set(incomingEdgeArray)
}
}
}
logger.info('DAG built', { logger.info('DAG built', {
totalNodes: dag.nodes.size, totalNodes: dag.nodes.size,
loopCount: dag.loopConfigs.size, loopCount: dag.loopConfigs.size,
parallelCount: dag.parallelConfigs.size, parallelCount: dag.parallelConfigs.size,
allNodeIds: Array.from(dag.nodes.keys()),
triggerNodes: Array.from(dag.nodes.values())
.filter((n) => n.metadata?.isResumeTrigger)
.map((n) => ({ id: n.id, originalBlockId: n.metadata?.originalBlockId })),
}) })
return dag return dag

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { EDGE, isConditionBlockType, isRouterBlockType } from '@/executor/consts' import { EDGE, isConditionBlockType, isRouterBlockType } from '@/executor/consts'
import type { DAG } from '@/executor/dag/builder'
import { import {
buildBranchNodeId, buildBranchNodeId,
buildSentinelEndId, buildSentinelEndId,
@@ -9,7 +10,6 @@ import {
parseDistributionItems, parseDistributionItems,
} from '@/executor/utils/subflow-utils' } from '@/executor/utils/subflow-utils'
import type { SerializedWorkflow } from '@/serializer/types' import type { SerializedWorkflow } from '@/serializer/types'
import type { DAG } from '../builder'
const logger = createLogger('EdgeConstructor') const logger = createLogger('EdgeConstructor')
@@ -31,11 +31,13 @@ export class EdgeConstructor {
dag: DAG, dag: DAG,
blocksInParallels: Set<string>, blocksInParallels: Set<string>,
blocksInLoops: Set<string>, blocksInLoops: Set<string>,
reachableBlocks: Set<string> reachableBlocks: Set<string>,
pauseTriggerMapping: Map<string, string>
): void { ): void {
const loopBlockIds = new Set(dag.loopConfigs.keys()) const loopBlockIds = new Set(dag.loopConfigs.keys())
const parallelBlockIds = new Set(dag.parallelConfigs.keys()) const parallelBlockIds = new Set(dag.parallelConfigs.keys())
const metadata = this.buildMetadataMaps(workflow) const metadata = this.buildMetadataMaps(workflow)
this.wireRegularEdges( this.wireRegularEdges(
workflow, workflow,
dag, dag,
@@ -44,21 +46,26 @@ export class EdgeConstructor {
reachableBlocks, reachableBlocks,
loopBlockIds, loopBlockIds,
parallelBlockIds, parallelBlockIds,
metadata metadata,
pauseTriggerMapping
) )
this.wireLoopSentinels(dag, reachableBlocks) this.wireLoopSentinels(dag, reachableBlocks)
this.wireParallelBlocks(workflow, dag, loopBlockIds, parallelBlockIds) this.wireParallelBlocks(workflow, dag, loopBlockIds, parallelBlockIds, pauseTriggerMapping)
} }
private buildMetadataMaps(workflow: SerializedWorkflow): EdgeMetadata { private buildMetadataMaps(workflow: SerializedWorkflow): EdgeMetadata {
const blockTypeMap = new Map<string, string>() const blockTypeMap = new Map<string, string>()
const conditionConfigMap = new Map<string, ConditionConfig[]>() const conditionConfigMap = new Map<string, ConditionConfig[]>()
const routerBlockIds = new Set<string>() const routerBlockIds = new Set<string>()
for (const block of workflow.blocks) { for (const block of workflow.blocks) {
const blockType = block.metadata?.id ?? '' const blockType = block.metadata?.id ?? ''
blockTypeMap.set(block.id, blockType) blockTypeMap.set(block.id, blockType)
if (isConditionBlockType(blockType)) { if (isConditionBlockType(blockType)) {
const conditions = this.parseConditionConfig(block) const conditions = this.parseConditionConfig(block)
if (conditions) { if (conditions) {
conditionConfigMap.set(block.id, conditions) conditionConfigMap.set(block.id, conditions)
} }
@@ -66,24 +73,29 @@ export class EdgeConstructor {
routerBlockIds.add(block.id) routerBlockIds.add(block.id)
} }
} }
return { blockTypeMap, conditionConfigMap, routerBlockIds } return { blockTypeMap, conditionConfigMap, routerBlockIds }
} }
private parseConditionConfig(block: any): ConditionConfig[] | null { private parseConditionConfig(block: any): ConditionConfig[] | null {
try { try {
const conditionsJson = block.config.params?.conditions const conditionsJson = block.config.params?.conditions
if (typeof conditionsJson === 'string') { if (typeof conditionsJson === 'string') {
return JSON.parse(conditionsJson) return JSON.parse(conditionsJson)
} }
if (Array.isArray(conditionsJson)) { if (Array.isArray(conditionsJson)) {
return conditionsJson return conditionsJson
} }
return null return null
} catch (error) { } catch (error) {
logger.warn('Failed to parse condition config', { logger.warn('Failed to parse condition config', {
blockId: block.id, blockId: block.id,
error: error instanceof Error ? error.message : String(error), error: error instanceof Error ? error.message : String(error),
}) })
return null return null
} }
} }
@@ -96,21 +108,25 @@ export class EdgeConstructor {
workflow: SerializedWorkflow workflow: SerializedWorkflow
): string | undefined { ): string | undefined {
let handle = sourceHandle let handle = sourceHandle
if (!handle && isConditionBlockType(metadata.blockTypeMap.get(source) ?? '')) { if (!handle && isConditionBlockType(metadata.blockTypeMap.get(source) ?? '')) {
const conditions = metadata.conditionConfigMap.get(source) const conditions = metadata.conditionConfigMap.get(source)
if (conditions && conditions.length > 0) { if (conditions && conditions.length > 0) {
const edgesFromCondition = workflow.connections.filter((c) => c.source === source) const edgesFromCondition = workflow.connections.filter((c) => c.source === source)
const edgeIndex = edgesFromCondition.findIndex((e) => e.target === target) const edgeIndex = edgesFromCondition.findIndex((e) => e.target === target)
if (edgeIndex >= 0 && edgeIndex < conditions.length) { if (edgeIndex >= 0 && edgeIndex < conditions.length) {
const correspondingCondition = conditions[edgeIndex] const correspondingCondition = conditions[edgeIndex]
handle = `${EDGE.CONDITION_PREFIX}${correspondingCondition.id}` handle = `${EDGE.CONDITION_PREFIX}${correspondingCondition.id}`
} }
} }
} }
if (metadata.routerBlockIds.has(source)) { if (metadata.routerBlockIds.has(source)) {
handle = `${EDGE.ROUTER_PREFIX}${target}` handle = `${EDGE.ROUTER_PREFIX}${target}`
logger.debug('Set router sourceHandle', { source, target, sourceHandle: handle })
} }
return handle return handle
} }
@@ -122,10 +138,12 @@ export class EdgeConstructor {
reachableBlocks: Set<string>, reachableBlocks: Set<string>,
loopBlockIds: Set<string>, loopBlockIds: Set<string>,
parallelBlockIds: Set<string>, parallelBlockIds: Set<string>,
metadata: EdgeMetadata metadata: EdgeMetadata,
pauseTriggerMapping: Map<string, string>
): void { ): void {
for (const connection of workflow.connections) { for (const connection of workflow.connections) {
let { source, target } = connection let { source, target } = connection
const originalSource = source
let sourceHandle = this.generateSourceHandle( let sourceHandle = this.generateSourceHandle(
source, source,
target, target,
@@ -138,6 +156,7 @@ export class EdgeConstructor {
const targetIsLoopBlock = loopBlockIds.has(target) const targetIsLoopBlock = loopBlockIds.has(target)
const sourceIsParallelBlock = parallelBlockIds.has(source) const sourceIsParallelBlock = parallelBlockIds.has(source)
const targetIsParallelBlock = parallelBlockIds.has(target) const targetIsParallelBlock = parallelBlockIds.has(target)
if ( if (
sourceIsLoopBlock || sourceIsLoopBlock ||
targetIsLoopBlock || targetIsLoopBlock ||
@@ -146,38 +165,42 @@ export class EdgeConstructor {
) { ) {
if (sourceIsLoopBlock) { if (sourceIsLoopBlock) {
const sentinelEndId = buildSentinelEndId(source) const sentinelEndId = buildSentinelEndId(source)
if (!dag.nodes.has(sentinelEndId)) { if (!dag.nodes.has(sentinelEndId)) {
logger.debug('Skipping loop exit edge - sentinel not found', { source, target })
continue continue
} }
source = sentinelEndId source = sentinelEndId
sourceHandle = EDGE.LOOP_EXIT sourceHandle = EDGE.LOOP_EXIT
logger.debug('Redirected loop exit edge', { from: sentinelEndId, to: target })
} }
if (targetIsLoopBlock) { if (targetIsLoopBlock) {
const sentinelStartId = buildSentinelStartId(target) const sentinelStartId = buildSentinelStartId(target)
if (!dag.nodes.has(sentinelStartId)) { if (!dag.nodes.has(sentinelStartId)) {
logger.debug('Skipping loop entry edge - sentinel not found', { source, target })
continue continue
} }
target = sentinelStartId target = sentinelStartId
logger.debug('Redirected loop entry edge', { from: source, to: sentinelStartId })
} }
if (sourceIsParallelBlock || targetIsParallelBlock) { if (sourceIsParallelBlock || targetIsParallelBlock) {
continue continue
} }
} }
if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) { if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) {
logger.debug('Skipping edge that crosses loop boundary', { source, target })
continue continue
} }
if (!this.isEdgeReachable(source, target, reachableBlocks, dag)) { if (!this.isEdgeReachable(source, target, reachableBlocks, dag)) {
logger.debug('Skipping edge - not reachable', { source, target })
continue continue
} }
if (blocksInParallels.has(source) && blocksInParallels.has(target)) { if (blocksInParallels.has(source) && blocksInParallels.has(target)) {
const sourceParallelId = this.getParallelId(source, dag) const sourceParallelId = this.getParallelId(source, dag)
const targetParallelId = this.getParallelId(target, dag) const targetParallelId = this.getParallelId(target, dag)
if (sourceParallelId === targetParallelId) { if (sourceParallelId === targetParallelId) {
this.wireParallelInternalEdge( this.wireParallelInternalEdge(
source, source,
@@ -185,18 +208,16 @@ export class EdgeConstructor {
sourceParallelId!, sourceParallelId!,
dag, dag,
sourceHandle, sourceHandle,
targetHandle targetHandle,
pauseTriggerMapping
) )
} else { } else {
logger.warn('Edge between different parallels - invalid workflow', { source, target }) logger.warn('Edge between different parallels - invalid workflow', { source, target })
} }
} else if (blocksInParallels.has(source) || blocksInParallels.has(target)) { } else if (blocksInParallels.has(source) || blocksInParallels.has(target)) {
logger.debug('Skipping internal-to-external edge (handled by parallel wiring)', {
source,
target,
})
} else { } else {
this.addEdge(dag, source, target, sourceHandle, targetHandle) const resolvedSource = pauseTriggerMapping.get(originalSource) ?? source
this.addEdge(dag, resolvedSource, target, sourceHandle, targetHandle)
} }
} }
} }
@@ -204,27 +225,27 @@ export class EdgeConstructor {
private wireLoopSentinels(dag: DAG, reachableBlocks: Set<string>): void { private wireLoopSentinels(dag: DAG, reachableBlocks: Set<string>): void {
for (const [loopId, loopConfig] of dag.loopConfigs) { for (const [loopId, loopConfig] of dag.loopConfigs) {
const nodes = loopConfig.nodes const nodes = loopConfig.nodes
if (nodes.length === 0) continue if (nodes.length === 0) continue
const sentinelStartId = buildSentinelStartId(loopId) const sentinelStartId = buildSentinelStartId(loopId)
const sentinelEndId = buildSentinelEndId(loopId) const sentinelEndId = buildSentinelEndId(loopId)
if (!dag.nodes.has(sentinelStartId) || !dag.nodes.has(sentinelEndId)) { if (!dag.nodes.has(sentinelStartId) || !dag.nodes.has(sentinelEndId)) {
logger.debug('Skipping sentinel wiring for unreachable loop', { loopId })
continue continue
} }
const { startNodes, terminalNodes } = this.findLoopBoundaryNodes(nodes, dag, reachableBlocks) const { startNodes, terminalNodes } = this.findLoopBoundaryNodes(nodes, dag, reachableBlocks)
logger.debug('Wiring sentinel nodes for loop', {
loopId,
startNodes,
terminalNodes,
})
for (const startNodeId of startNodes) { for (const startNodeId of startNodes) {
this.addEdge(dag, sentinelStartId, startNodeId) this.addEdge(dag, sentinelStartId, startNodeId)
} }
for (const terminalNodeId of terminalNodes) { for (const terminalNodeId of terminalNodes) {
this.addEdge(dag, terminalNodeId, sentinelEndId) this.addEdge(dag, terminalNodeId, sentinelEndId)
} }
this.addEdge(dag, sentinelEndId, sentinelStartId, EDGE.LOOP_CONTINUE, undefined, true) this.addEdge(dag, sentinelEndId, sentinelStartId, EDGE.LOOP_CONTINUE, undefined, true)
logger.debug('Added backward edge for loop', { loopId })
} }
} }
@@ -232,26 +253,33 @@ export class EdgeConstructor {
workflow: SerializedWorkflow, workflow: SerializedWorkflow,
dag: DAG, dag: DAG,
loopBlockIds: Set<string>, loopBlockIds: Set<string>,
parallelBlockIds: Set<string> parallelBlockIds: Set<string>,
pauseTriggerMapping: Map<string, string>
): void { ): void {
for (const [parallelId, parallelConfig] of dag.parallelConfigs) { for (const [parallelId, parallelConfig] of dag.parallelConfigs) {
const nodes = parallelConfig.nodes const nodes = parallelConfig.nodes
if (nodes.length === 0) continue if (nodes.length === 0) continue
const { entryNodes, terminalNodes, branchCount } = this.findParallelBoundaryNodes( const { entryNodes, terminalNodes, branchCount } = this.findParallelBoundaryNodes(
nodes, nodes,
parallelId, parallelId,
dag dag
) )
logger.info('Wiring parallel block edges', { logger.info('Wiring parallel block edges', {
parallelId, parallelId,
entryNodes, entryNodes,
terminalNodes, terminalNodes,
branchCount, branchCount,
}) })
for (const connection of workflow.connections) { for (const connection of workflow.connections) {
const { source, target, sourceHandle, targetHandle } = connection const { source, target, sourceHandle, targetHandle } = connection
if (target === parallelId) { if (target === parallelId) {
if (loopBlockIds.has(source) || parallelBlockIds.has(source)) continue if (loopBlockIds.has(source) || parallelBlockIds.has(source)) continue
if (nodes.includes(source)) { if (nodes.includes(source)) {
logger.warn('Invalid: parallel block connected from its own internal node', { logger.warn('Invalid: parallel block connected from its own internal node', {
parallelId, parallelId,
@@ -259,18 +287,23 @@ export class EdgeConstructor {
}) })
continue continue
} }
logger.info('Wiring edge to parallel block', { source, parallelId, entryNodes }) logger.info('Wiring edge to parallel block', { source, parallelId, entryNodes })
for (const entryNodeId of entryNodes) { for (const entryNodeId of entryNodes) {
for (let i = 0; i < branchCount; i++) { for (let i = 0; i < branchCount; i++) {
const branchNodeId = buildBranchNodeId(entryNodeId, i) const branchNodeId = buildBranchNodeId(entryNodeId, i)
if (dag.nodes.has(branchNodeId)) { if (dag.nodes.has(branchNodeId)) {
this.addEdge(dag, source, branchNodeId, sourceHandle, targetHandle) this.addEdge(dag, source, branchNodeId, sourceHandle, targetHandle)
} }
} }
} }
} }
if (source === parallelId) { if (source === parallelId) {
if (loopBlockIds.has(target) || parallelBlockIds.has(target)) continue if (loopBlockIds.has(target) || parallelBlockIds.has(target)) continue
if (nodes.includes(target)) { if (nodes.includes(target)) {
logger.warn('Invalid: parallel block connected to its own internal node', { logger.warn('Invalid: parallel block connected to its own internal node', {
parallelId, parallelId,
@@ -278,12 +311,16 @@ export class EdgeConstructor {
}) })
continue continue
} }
logger.info('Wiring edge from parallel block', { parallelId, target, terminalNodes }) logger.info('Wiring edge from parallel block', { parallelId, target, terminalNodes })
for (const terminalNodeId of terminalNodes) { for (const terminalNodeId of terminalNodes) {
for (let i = 0; i < branchCount; i++) { for (let i = 0; i < branchCount; i++) {
const branchNodeId = buildBranchNodeId(terminalNodeId, i) const branchNodeId = buildBranchNodeId(terminalNodeId, i)
if (dag.nodes.has(branchNodeId)) { if (dag.nodes.has(branchNodeId)) {
this.addEdge(dag, branchNodeId, target, sourceHandle, targetHandle) const resolvedSourceId = pauseTriggerMapping.get(branchNodeId) ?? branchNodeId
this.addEdge(dag, resolvedSourceId, target, sourceHandle, targetHandle)
} }
} }
} }
@@ -300,22 +337,28 @@ export class EdgeConstructor {
): boolean { ): boolean {
const sourceInLoop = blocksInLoops.has(source) const sourceInLoop = blocksInLoops.has(source)
const targetInLoop = blocksInLoops.has(target) const targetInLoop = blocksInLoops.has(target)
if (sourceInLoop !== targetInLoop) { if (sourceInLoop !== targetInLoop) {
return true return true
} }
if (!sourceInLoop && !targetInLoop) { if (!sourceInLoop && !targetInLoop) {
return false return false
} }
let sourceLoopId: string | undefined let sourceLoopId: string | undefined
let targetLoopId: string | undefined let targetLoopId: string | undefined
for (const [loopId, loopConfig] of dag.loopConfigs) { for (const [loopId, loopConfig] of dag.loopConfigs) {
if (loopConfig.nodes.includes(source)) { if (loopConfig.nodes.includes(source)) {
sourceLoopId = loopId sourceLoopId = loopId
} }
if (loopConfig.nodes.includes(target)) { if (loopConfig.nodes.includes(target)) {
targetLoopId = loopId targetLoopId = loopId
} }
} }
return sourceLoopId !== targetLoopId return sourceLoopId !== targetLoopId
} }
@@ -340,18 +383,23 @@ export class EdgeConstructor {
parallelId: string, parallelId: string,
dag: DAG, dag: DAG,
sourceHandle?: string, sourceHandle?: string,
targetHandle?: string targetHandle?: string,
pauseTriggerMapping?: Map<string, string>
): void { ): void {
const parallelConfig = dag.parallelConfigs.get(parallelId) const parallelConfig = dag.parallelConfigs.get(parallelId)
if (!parallelConfig) { if (!parallelConfig) {
throw new Error(`Parallel config not found: ${parallelId}`) throw new Error(`Parallel config not found: ${parallelId}`)
} }
const distributionItems = parseDistributionItems(parallelConfig) const distributionItems = parseDistributionItems(parallelConfig)
const count = calculateBranchCount(parallelConfig, distributionItems) const count = calculateBranchCount(parallelConfig, distributionItems)
for (let i = 0; i < count; i++) { for (let i = 0; i < count; i++) {
const sourceNodeId = buildBranchNodeId(source, i) const sourceNodeId = buildBranchNodeId(source, i)
const targetNodeId = buildBranchNodeId(target, i) const targetNodeId = buildBranchNodeId(target, i)
this.addEdge(dag, sourceNodeId, targetNodeId, sourceHandle, targetHandle) const resolvedSourceId = pauseTriggerMapping?.get(sourceNodeId) ?? sourceNodeId
this.addEdge(dag, resolvedSourceId, targetNodeId, sourceHandle, targetHandle)
} }
} }
@@ -363,34 +411,45 @@ export class EdgeConstructor {
const nodesSet = new Set(nodes) const nodesSet = new Set(nodes)
const startNodesSet = new Set<string>() const startNodesSet = new Set<string>()
const terminalNodesSet = new Set<string>() const terminalNodesSet = new Set<string>()
for (const nodeId of nodes) { for (const nodeId of nodes) {
const node = dag.nodes.get(nodeId) const node = dag.nodes.get(nodeId)
if (!node) continue if (!node) continue
let hasIncomingFromLoop = false let hasIncomingFromLoop = false
for (const incomingNodeId of node.incomingEdges) { for (const incomingNodeId of node.incomingEdges) {
if (nodesSet.has(incomingNodeId)) { if (nodesSet.has(incomingNodeId)) {
hasIncomingFromLoop = true hasIncomingFromLoop = true
break break
} }
} }
if (!hasIncomingFromLoop) { if (!hasIncomingFromLoop) {
startNodesSet.add(nodeId) startNodesSet.add(nodeId)
} }
} }
for (const nodeId of nodes) { for (const nodeId of nodes) {
const node = dag.nodes.get(nodeId) const node = dag.nodes.get(nodeId)
if (!node) continue if (!node) continue
let hasOutgoingToLoop = false let hasOutgoingToLoop = false
for (const [_, edge] of node.outgoingEdges) { for (const [_, edge] of node.outgoingEdges) {
if (nodesSet.has(edge.target)) { if (nodesSet.has(edge.target)) {
hasOutgoingToLoop = true hasOutgoingToLoop = true
break break
} }
} }
if (!hasOutgoingToLoop) { if (!hasOutgoingToLoop) {
terminalNodesSet.add(nodeId) terminalNodesSet.add(nodeId)
} }
} }
return { return {
startNodes: Array.from(startNodesSet), startNodes: Array.from(startNodesSet),
terminalNodes: Array.from(terminalNodesSet), terminalNodes: Array.from(terminalNodesSet),
@@ -406,59 +465,80 @@ export class EdgeConstructor {
const entryNodesSet = new Set<string>() const entryNodesSet = new Set<string>()
const terminalNodesSet = new Set<string>() const terminalNodesSet = new Set<string>()
const parallelConfig = dag.parallelConfigs.get(parallelId) const parallelConfig = dag.parallelConfigs.get(parallelId)
if (!parallelConfig) { if (!parallelConfig) {
throw new Error(`Parallel config not found: ${parallelId}`) throw new Error(`Parallel config not found: ${parallelId}`)
} }
const distributionItems = parseDistributionItems(parallelConfig) const distributionItems = parseDistributionItems(parallelConfig)
const branchCount = calculateBranchCount(parallelConfig, distributionItems) const branchCount = calculateBranchCount(parallelConfig, distributionItems)
for (const nodeId of nodes) { for (const nodeId of nodes) {
let hasAnyBranch = false let hasAnyBranch = false
for (let i = 0; i < branchCount; i++) { for (let i = 0; i < branchCount; i++) {
if (dag.nodes.has(buildBranchNodeId(nodeId, i))) { if (dag.nodes.has(buildBranchNodeId(nodeId, i))) {
hasAnyBranch = true hasAnyBranch = true
break break
} }
} }
if (!hasAnyBranch) continue if (!hasAnyBranch) continue
const firstBranchId = buildBranchNodeId(nodeId, 0) const firstBranchId = buildBranchNodeId(nodeId, 0)
const firstBranchNode = dag.nodes.get(firstBranchId) const firstBranchNode = dag.nodes.get(firstBranchId)
if (!firstBranchNode) continue if (!firstBranchNode) continue
let hasIncomingFromParallel = false let hasIncomingFromParallel = false
for (const incomingNodeId of firstBranchNode.incomingEdges) { for (const incomingNodeId of firstBranchNode.incomingEdges) {
const originalNodeId = extractBaseBlockId(incomingNodeId) const originalNodeId = extractBaseBlockId(incomingNodeId)
if (nodesSet.has(originalNodeId)) { if (nodesSet.has(originalNodeId)) {
hasIncomingFromParallel = true hasIncomingFromParallel = true
break break
} }
} }
if (!hasIncomingFromParallel) { if (!hasIncomingFromParallel) {
entryNodesSet.add(nodeId) entryNodesSet.add(nodeId)
} }
} }
for (const nodeId of nodes) { for (const nodeId of nodes) {
let hasAnyBranch = false let hasAnyBranch = false
for (let i = 0; i < branchCount; i++) { for (let i = 0; i < branchCount; i++) {
if (dag.nodes.has(buildBranchNodeId(nodeId, i))) { if (dag.nodes.has(buildBranchNodeId(nodeId, i))) {
hasAnyBranch = true hasAnyBranch = true
break break
} }
} }
if (!hasAnyBranch) continue if (!hasAnyBranch) continue
const firstBranchId = buildBranchNodeId(nodeId, 0) const firstBranchId = buildBranchNodeId(nodeId, 0)
const firstBranchNode = dag.nodes.get(firstBranchId) const firstBranchNode = dag.nodes.get(firstBranchId)
if (!firstBranchNode) continue if (!firstBranchNode) continue
let hasOutgoingToParallel = false let hasOutgoingToParallel = false
for (const [_, edge] of firstBranchNode.outgoingEdges) { for (const [_, edge] of firstBranchNode.outgoingEdges) {
const originalTargetId = extractBaseBlockId(edge.target) const originalTargetId = extractBaseBlockId(edge.target)
if (nodesSet.has(originalTargetId)) { if (nodesSet.has(originalTargetId)) {
hasOutgoingToParallel = true hasOutgoingToParallel = true
break break
} }
} }
if (!hasOutgoingToParallel) { if (!hasOutgoingToParallel) {
terminalNodesSet.add(nodeId) terminalNodesSet.add(nodeId)
} }
} }
return { return {
entryNodes: Array.from(entryNodesSet), entryNodes: Array.from(entryNodesSet),
terminalNodes: Array.from(terminalNodesSet), terminalNodes: Array.from(terminalNodesSet),
@@ -485,25 +565,23 @@ export class EdgeConstructor {
): void { ): void {
const sourceNode = dag.nodes.get(sourceId) const sourceNode = dag.nodes.get(sourceId)
const targetNode = dag.nodes.get(targetId) const targetNode = dag.nodes.get(targetId)
if (!sourceNode || !targetNode) { if (!sourceNode || !targetNode) {
logger.warn('Edge references non-existent node', { sourceId, targetId }) logger.warn('Edge references non-existent node', { sourceId, targetId })
return return
} }
const edgeId = `${sourceId}${targetId}` const edgeId = `${sourceId}${targetId}`
sourceNode.outgoingEdges.set(edgeId, { sourceNode.outgoingEdges.set(edgeId, {
target: targetId, target: targetId,
sourceHandle, sourceHandle,
targetHandle, targetHandle,
isActive: isLoopBackEdge ? false : undefined, isActive: isLoopBackEdge ? false : undefined,
}) })
if (!isLoopBackEdge) { if (!isLoopBackEdge) {
targetNode.incomingEdges.add(sourceId) targetNode.incomingEdges.add(sourceId)
logger.debug('Added incoming edge', { from: sourceId, to: targetId })
} else {
logger.debug('Skipped adding backwards-edge to incomingEdges', {
from: sourceId,
to: targetId,
})
} }
} }
} }

View File

@@ -1,7 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, LOOP, type SentinelType } from '@/executor/consts' import { BlockType, LOOP, type SentinelType } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import { buildSentinelEndId, buildSentinelStartId } from '@/executor/utils/subflow-utils' import { buildSentinelEndId, buildSentinelStartId } from '@/executor/utils/subflow-utils'
import type { DAG, DAGNode } from '../builder'
const logger = createLogger('LoopConstructor') const logger = createLogger('LoopConstructor')
@@ -9,16 +9,19 @@ export class LoopConstructor {
execute(dag: DAG, reachableBlocks: Set<string>): void { execute(dag: DAG, reachableBlocks: Set<string>): void {
for (const [loopId, loopConfig] of dag.loopConfigs) { for (const [loopId, loopConfig] of dag.loopConfigs) {
const loopNodes = loopConfig.nodes const loopNodes = loopConfig.nodes
if (loopNodes.length === 0) { if (loopNodes.length === 0) {
continue continue
} }
if (!this.hasReachableNodes(loopNodes, reachableBlocks)) { if (!this.hasReachableNodes(loopNodes, reachableBlocks)) {
logger.debug('Skipping sentinel creation for unreachable loop', { loopId })
continue continue
} }
this.createSentinelPair(dag, loopId) this.createSentinelPair(dag, loopId)
} }
} }
private hasReachableNodes(loopNodes: string[], reachableBlocks: Set<string>): boolean { private hasReachableNodes(loopNodes: string[], reachableBlocks: Set<string>): boolean {
return loopNodes.some((nodeId) => reachableBlocks.has(nodeId)) return loopNodes.some((nodeId) => reachableBlocks.has(nodeId))
} }
@@ -26,6 +29,7 @@ export class LoopConstructor {
private createSentinelPair(dag: DAG, loopId: string): void { private createSentinelPair(dag: DAG, loopId: string): void {
const startId = buildSentinelStartId(loopId) const startId = buildSentinelStartId(loopId)
const endId = buildSentinelEndId(loopId) const endId = buildSentinelEndId(loopId)
dag.nodes.set( dag.nodes.set(
startId, startId,
this.createSentinelNode({ this.createSentinelNode({
@@ -33,9 +37,10 @@ export class LoopConstructor {
loopId, loopId,
sentinelType: LOOP.SENTINEL.START_TYPE, sentinelType: LOOP.SENTINEL.START_TYPE,
blockType: BlockType.SENTINEL_START, blockType: BlockType.SENTINEL_START,
name: `Loop Start (${loopId})`, name: `${LOOP.SENTINEL.START_NAME_PREFIX} (${loopId})`,
}) })
) )
dag.nodes.set( dag.nodes.set(
endId, endId,
this.createSentinelNode({ this.createSentinelNode({
@@ -43,15 +48,9 @@ export class LoopConstructor {
loopId, loopId,
sentinelType: LOOP.SENTINEL.END_TYPE, sentinelType: LOOP.SENTINEL.END_TYPE,
blockType: BlockType.SENTINEL_END, blockType: BlockType.SENTINEL_END,
name: `Loop End (${loopId})`, name: `${LOOP.SENTINEL.END_NAME_PREFIX} (${loopId})`,
}) })
) )
logger.debug('Created sentinel pair for loop', {
loopId,
startId,
endId,
})
} }
private createSentinelNode(config: { private createSentinelNode(config: {

View File

@@ -1,14 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger' import { BlockType, isMetadataOnlyBlockType } from '@/executor/consts'
import { isMetadataOnlyBlockType } from '@/executor/consts' import type { DAG, DAGNode } from '@/executor/dag/builder'
import { import {
buildBranchNodeId, buildBranchNodeId,
calculateBranchCount, calculateBranchCount,
parseDistributionItems, parseDistributionItems,
} from '@/executor/utils/subflow-utils' } from '@/executor/utils/subflow-utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import type { DAG, DAGNode } from '../builder'
const logger = createLogger('NodeConstructor')
interface ParallelExpansion { interface ParallelExpansion {
parallelId: string parallelId: string
branchCount: number branchCount: number
@@ -20,39 +18,47 @@ export class NodeConstructor {
workflow: SerializedWorkflow, workflow: SerializedWorkflow,
dag: DAG, dag: DAG,
reachableBlocks: Set<string> reachableBlocks: Set<string>
): { blocksInLoops: Set<string>; blocksInParallels: Set<string> } { ): {
blocksInLoops: Set<string>
blocksInParallels: Set<string>
pauseTriggerMapping: Map<string, string>
} {
const blocksInLoops = new Set<string>() const blocksInLoops = new Set<string>()
const blocksInParallels = new Set<string>() const blocksInParallels = new Set<string>()
const pauseTriggerMapping = new Map<string, string>()
this.categorizeBlocks(dag, reachableBlocks, blocksInLoops, blocksInParallels) this.categorizeBlocks(dag, reachableBlocks, blocksInLoops, blocksInParallels)
for (const block of workflow.blocks) { for (const block of workflow.blocks) {
if (!this.shouldProcessBlock(block, reachableBlocks)) { if (!this.shouldProcessBlock(block, reachableBlocks)) {
continue continue
} }
const parallelId = this.findParallelForBlock(block.id, dag) const parallelId = this.findParallelForBlock(block.id, dag)
if (parallelId) { if (parallelId) {
this.createParallelBranchNodes(block, parallelId, dag) this.createParallelBranchNodes(block, parallelId, dag)
} else { } else {
this.createRegularOrLoopNode(block, blocksInLoops, dag) this.createRegularOrLoopNode(block, blocksInLoops, dag)
} }
} }
return { blocksInLoops, blocksInParallels }
return { blocksInLoops, blocksInParallels, pauseTriggerMapping }
} }
private shouldProcessBlock(block: SerializedBlock, reachableBlocks: Set<string>): boolean { private shouldProcessBlock(block: SerializedBlock, reachableBlocks: Set<string>): boolean {
if (!block.enabled) { if (!block.enabled) {
return false return false
} }
if (!reachableBlocks.has(block.id)) { if (!reachableBlocks.has(block.id)) {
logger.debug('Skipping unreachable block', { blockId: block.id })
return false return false
} }
if (isMetadataOnlyBlockType(block.metadata?.id)) { if (isMetadataOnlyBlockType(block.metadata?.id)) {
logger.debug('Skipping metadata-only block', {
blockId: block.id,
blockType: block.metadata?.id,
})
return false return false
} }
return true return true
} }
@@ -96,11 +102,7 @@ export class NodeConstructor {
private createParallelBranchNodes(block: SerializedBlock, parallelId: string, dag: DAG): void { private createParallelBranchNodes(block: SerializedBlock, parallelId: string, dag: DAG): void {
const expansion = this.calculateParallelExpansion(parallelId, dag) const expansion = this.calculateParallelExpansion(parallelId, dag)
logger.debug('Creating parallel branches', {
blockId: block.id,
parallelId: expansion.parallelId,
branchCount: expansion.branchCount,
})
for (let branchIndex = 0; branchIndex < expansion.branchCount; branchIndex++) { for (let branchIndex = 0; branchIndex < expansion.branchCount; branchIndex++) {
const branchNode = this.createParallelBranchNode(block, branchIndex, expansion) const branchNode = this.createParallelBranchNode(block, branchIndex, expansion)
dag.nodes.set(branchNode.id, branchNode) dag.nodes.set(branchNode.id, branchNode)
@@ -109,11 +111,14 @@ export class NodeConstructor {
private calculateParallelExpansion(parallelId: string, dag: DAG): ParallelExpansion { private calculateParallelExpansion(parallelId: string, dag: DAG): ParallelExpansion {
const config = dag.parallelConfigs.get(parallelId) const config = dag.parallelConfigs.get(parallelId)
if (!config) { if (!config) {
throw new Error(`Parallel config not found: ${parallelId}`) throw new Error(`Parallel config not found: ${parallelId}`)
} }
const distributionItems = parseDistributionItems(config) const distributionItems = parseDistributionItems(config)
const branchCount = calculateBranchCount(config, distributionItems) const branchCount = calculateBranchCount(config, distributionItems)
return { return {
parallelId, parallelId,
branchCount, branchCount,
@@ -127,9 +132,13 @@ export class NodeConstructor {
expansion: ParallelExpansion expansion: ParallelExpansion
): DAGNode { ): DAGNode {
const branchNodeId = buildBranchNodeId(baseBlock.id, branchIndex) const branchNodeId = buildBranchNodeId(baseBlock.id, branchIndex)
const blockClone: SerializedBlock = {
...baseBlock,
id: branchNodeId,
}
return { return {
id: branchNodeId, id: branchNodeId,
block: { ...baseBlock }, block: blockClone,
incomingEdges: new Set(), incomingEdges: new Set(),
outgoingEdges: new Map(), outgoingEdges: new Map(),
metadata: { metadata: {
@@ -138,6 +147,8 @@ export class NodeConstructor {
branchIndex, branchIndex,
branchTotal: expansion.branchCount, branchTotal: expansion.branchCount,
distributionItem: expansion.distributionItems[branchIndex], distributionItem: expansion.distributionItems[branchIndex],
isPauseResponse: baseBlock.metadata?.id === BlockType.APPROVAL,
originalBlockId: baseBlock.id,
}, },
} }
} }
@@ -149,6 +160,8 @@ export class NodeConstructor {
): void { ): void {
const isLoopNode = blocksInLoops.has(block.id) const isLoopNode = blocksInLoops.has(block.id)
const loopId = isLoopNode ? this.findLoopIdForBlock(block.id, dag) : undefined const loopId = isLoopNode ? this.findLoopIdForBlock(block.id, dag) : undefined
const isPauseBlock = block.metadata?.id === BlockType.APPROVAL
dag.nodes.set(block.id, { dag.nodes.set(block.id, {
id: block.id, id: block.id,
block, block,
@@ -157,10 +170,50 @@ export class NodeConstructor {
metadata: { metadata: {
isLoopNode, isLoopNode,
loopId, loopId,
isPauseResponse: isPauseBlock,
originalBlockId: block.id,
}, },
}) })
} }
private createTriggerNode(
block: SerializedBlock,
triggerId: string,
options: {
loopId?: string
isParallelBranch?: boolean
parallelId?: string
branchIndex?: number
branchTotal?: number
}
): DAGNode {
const triggerBlock: SerializedBlock = {
...block,
id: triggerId,
enabled: true,
metadata: {
...block.metadata,
id: BlockType.START_TRIGGER,
},
}
return {
id: triggerId,
block: triggerBlock,
incomingEdges: new Set(),
outgoingEdges: new Map(),
metadata: {
isResumeTrigger: true,
originalBlockId: block.id,
loopId: options.loopId,
isParallelBranch: options.isParallelBranch,
parallelId: options.parallelId,
branchIndex: options.branchIndex,
branchTotal: options.branchTotal,
},
}
}
private findLoopIdForBlock(blockId: string, dag: DAG): string | undefined { private findLoopIdForBlock(blockId: string, dag: DAG): string | undefined {
for (const [loopId, loopConfig] of dag.loopConfigs) { for (const [loopId, loopConfig] of dag.loopConfigs) {
if (loopConfig.nodes.includes(blockId)) { if (loopConfig.nodes.includes(blockId)) {

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { isMetadataOnlyBlockType, isTriggerBlockType } from '@/executor/consts' import { isMetadataOnlyBlockType, isTriggerBlockType } from '@/executor/consts'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
const logger = createLogger('PathConstructor') const logger = createLogger('PathConstructor')
@@ -7,18 +8,15 @@ const logger = createLogger('PathConstructor')
export class PathConstructor { export class PathConstructor {
execute(workflow: SerializedWorkflow, triggerBlockId?: string): Set<string> { execute(workflow: SerializedWorkflow, triggerBlockId?: string): Set<string> {
const resolvedTriggerId = this.findTriggerBlock(workflow, triggerBlockId) const resolvedTriggerId = this.findTriggerBlock(workflow, triggerBlockId)
if (!resolvedTriggerId) { if (!resolvedTriggerId) {
logger.warn('No trigger block found, including all enabled blocks as fallback') logger.warn('No trigger block found, including all enabled blocks as fallback')
return this.getAllEnabledBlocks(workflow) return this.getAllEnabledBlocks(workflow)
} }
logger.debug('Starting reachability traversal', { triggerBlockId: resolvedTriggerId })
const adjacency = this.buildAdjacencyMap(workflow) const adjacency = this.buildAdjacencyMap(workflow)
const reachable = this.performBFS(resolvedTriggerId, adjacency) const reachable = this.performBFS(resolvedTriggerId, adjacency)
logger.debug('Reachability analysis complete', {
triggerBlockId: resolvedTriggerId,
reachableCount: reachable.size,
totalBlocks: workflow.blocks.length,
})
return reachable return reachable
} }
@@ -28,39 +26,43 @@ export class PathConstructor {
): string | undefined { ): string | undefined {
if (triggerBlockId) { if (triggerBlockId) {
const block = workflow.blocks.find((b) => b.id === triggerBlockId) const block = workflow.blocks.find((b) => b.id === triggerBlockId)
if (!block) {
if (block) {
return triggerBlockId
}
const fallbackTriggerId = this.resolveResumeTriggerFallback(triggerBlockId, workflow)
if (fallbackTriggerId) {
return fallbackTriggerId
}
logger.error('Provided triggerBlockId not found in workflow', { logger.error('Provided triggerBlockId not found in workflow', {
triggerBlockId, triggerBlockId,
availableBlocks: workflow.blocks.map((b) => ({ id: b.id, type: b.metadata?.id })), availableBlocks: workflow.blocks.map((b) => ({ id: b.id, type: b.metadata?.id })),
}) })
throw new Error(`Trigger block not found: ${triggerBlockId}`) throw new Error(`Trigger block not found: ${triggerBlockId}`)
} }
logger.debug('Using explicitly provided trigger block', {
triggerBlockId,
blockType: block.metadata?.id,
})
return triggerBlockId
}
const explicitTrigger = this.findExplicitTrigger(workflow) const explicitTrigger = this.findExplicitTrigger(workflow)
if (explicitTrigger) { if (explicitTrigger) {
return explicitTrigger return explicitTrigger
} }
const rootBlock = this.findRootBlock(workflow) const rootBlock = this.findRootBlock(workflow)
if (rootBlock) { if (rootBlock) {
return rootBlock return rootBlock
} }
return undefined return undefined
} }
private findExplicitTrigger(workflow: SerializedWorkflow): string | undefined { private findExplicitTrigger(workflow: SerializedWorkflow): string | undefined {
for (const block of workflow.blocks) { for (const block of workflow.blocks) {
if (block.enabled && this.isTriggerBlock(block)) { if (block.enabled && this.isTriggerBlock(block)) {
logger.debug('Found explicit trigger block', {
blockId: block.id,
blockType: block.metadata?.id,
})
return block.id return block.id
} }
} }
@@ -69,40 +71,37 @@ export class PathConstructor {
private findRootBlock(workflow: SerializedWorkflow): string | undefined { private findRootBlock(workflow: SerializedWorkflow): string | undefined {
const hasIncoming = new Set(workflow.connections.map((c) => c.target)) const hasIncoming = new Set(workflow.connections.map((c) => c.target))
for (const block of workflow.blocks) { for (const block of workflow.blocks) {
if ( if (
!hasIncoming.has(block.id) && !hasIncoming.has(block.id) &&
block.enabled && block.enabled &&
!isMetadataOnlyBlockType(block.metadata?.id) !isMetadataOnlyBlockType(block.metadata?.id)
) { ) {
logger.debug('Found root block (no incoming connections)', {
blockId: block.id,
blockType: block.metadata?.id,
})
return block.id return block.id
} }
} }
return undefined return undefined
} }
private isTriggerBlock(block: SerializedBlock): boolean { private isTriggerBlock(block: SerializedBlock): boolean {
return isTriggerBlockType(block.metadata?.id) return isTriggerBlockType(block.metadata?.id)
} }
private getAllEnabledBlocks(workflow: SerializedWorkflow): Set<string> { private getAllEnabledBlocks(workflow: SerializedWorkflow): Set<string> {
return new Set(workflow.blocks.filter((b) => b.enabled).map((b) => b.id)) return new Set(workflow.blocks.filter((b) => b.enabled).map((b) => b.id))
} }
private buildAdjacencyMap(workflow: SerializedWorkflow): Map<string, string[]> { private buildAdjacencyMap(workflow: SerializedWorkflow): Map<string, string[]> {
const adjacency = new Map<string, string[]>() const adjacency = new Map<string, string[]>()
for (const connection of workflow.connections) { for (const connection of workflow.connections) {
const neighbors = adjacency.get(connection.source) ?? [] const neighbors = adjacency.get(connection.source) ?? []
neighbors.push(connection.target) neighbors.push(connection.target)
adjacency.set(connection.source, neighbors) adjacency.set(connection.source, neighbors)
} }
logger.debug('Built adjacency map', {
nodeCount: adjacency.size,
connectionCount: workflow.connections.length,
})
return adjacency return adjacency
} }
@@ -110,43 +109,44 @@ export class PathConstructor {
const reachable = new Set<string>([triggerBlockId]) const reachable = new Set<string>([triggerBlockId])
const queue = [triggerBlockId] const queue = [triggerBlockId]
logger.debug('Starting BFS traversal', {
triggerBlockId,
adjacencyMapSize: adjacency.size,
adjacencyEntries: Array.from(adjacency.entries()).map(([source, targets]) => ({
source,
targets,
})),
})
while (queue.length > 0) { while (queue.length > 0) {
const currentBlockId = queue.shift() const currentBlockId = queue.shift()
if (!currentBlockId) break if (!currentBlockId) break
const neighbors = adjacency.get(currentBlockId) ?? [] const neighbors = adjacency.get(currentBlockId) ?? []
logger.debug('BFS processing node', {
currentBlockId,
neighbors,
neighborCount: neighbors.length,
})
for (const neighborId of neighbors) { for (const neighborId of neighbors) {
if (!reachable.has(neighborId)) { if (!reachable.has(neighborId)) {
logger.debug('BFS found new reachable node', {
from: currentBlockId,
to: neighborId,
})
reachable.add(neighborId) reachable.add(neighborId)
queue.push(neighborId) queue.push(neighborId)
} }
} }
} }
logger.debug('BFS traversal complete', {
triggerBlockId,
reachableCount: reachable.size,
reachableBlocks: Array.from(reachable),
})
return reachable return reachable
} }
private resolveResumeTriggerFallback(
triggerBlockId: string,
workflow: SerializedWorkflow
): string | undefined {
if (!triggerBlockId.endsWith('__trigger')) {
return undefined
}
const baseId = triggerBlockId.replace(/__trigger$/, '')
const normalizedBaseId = extractBaseBlockId(baseId)
const candidates = baseId === normalizedBaseId ? [baseId] : [baseId, normalizedBaseId]
for (const candidate of candidates) {
const block = workflow.blocks.find((b) => b.id === candidate)
if (block) {
return candidate
}
}
return undefined
}
} }

View File

@@ -7,7 +7,7 @@ export interface DAGEdge {
export interface NodeMetadata { export interface NodeMetadata {
isParallelBranch?: boolean isParallelBranch?: boolean
parallelId?: string // Which parallel this branch belongs to parallelId?: string
branchIndex?: number branchIndex?: number
branchTotal?: number branchTotal?: number
distributionItem?: unknown distributionItem?: unknown
@@ -15,4 +15,7 @@ export interface NodeMetadata {
loopId?: string loopId?: string
isSentinel?: boolean isSentinel?: boolean
sentinelType?: 'start' | 'end' sentinelType?: 'start' | 'end'
isPauseResponse?: boolean
isResumeTrigger?: boolean
originalBlockId?: string
} }

View File

@@ -1,17 +1,30 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { DEFAULTS, EDGE, isSentinelBlockType } from '@/executor/consts' import { getBaseUrl } from '@/lib/urls/utils'
import {
BlockType,
buildResumeApiUrl,
buildResumeUiUrl,
DEFAULTS,
EDGE,
isSentinelBlockType,
} from '@/executor/consts'
import type { DAGNode } from '@/executor/dag/builder'
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
import {
generatePauseContextId,
mapNodeMetadataToPauseScopes,
} from '@/executor/pause-resume/utils.ts'
import type { import type {
BlockHandler, BlockHandler,
BlockLog, BlockLog,
BlockState,
ExecutionContext, ExecutionContext,
NormalizedBlockOutput, NormalizedBlockOutput,
} from '@/executor/types' } from '@/executor/types'
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedBlock } from '@/serializer/types' import type { SerializedBlock } from '@/serializer/types'
import type { SubflowType } from '@/stores/workflows/workflow/types' import type { SubflowType } from '@/stores/workflows/workflow/types'
import type { DAGNode } from '../dag/builder'
import type { VariableResolver } from '../variables/resolver'
import type { ExecutionState } from './state'
import type { ContextExtensions } from './types'
const logger = createLogger('BlockExecutor') const logger = createLogger('BlockExecutor')
@@ -20,7 +33,7 @@ export class BlockExecutor {
private blockHandlers: BlockHandler[], private blockHandlers: BlockHandler[],
private resolver: VariableResolver, private resolver: VariableResolver,
private contextExtensions: ContextExtensions, private contextExtensions: ContextExtensions,
private state?: ExecutionState private state: BlockStateWriter
) {} ) {}
async execute( async execute(
@@ -30,7 +43,11 @@ export class BlockExecutor {
): Promise<NormalizedBlockOutput> { ): Promise<NormalizedBlockOutput> {
const handler = this.findHandler(block) const handler = this.findHandler(block)
if (!handler) { if (!handler) {
throw new Error(`No handler found for block type: ${block.metadata?.id}`) throw buildBlockExecutionError({
block,
context: ctx,
error: `No handler found for block type: ${block.metadata?.id ?? 'unknown'}`,
})
} }
const isSentinel = isSentinelBlockType(block.metadata?.id ?? '') const isSentinel = isSentinelBlockType(block.metadata?.id ?? '')
@@ -45,9 +62,23 @@ export class BlockExecutor {
const startTime = Date.now() const startTime = Date.now()
let resolvedInputs: Record<string, any> = {} let resolvedInputs: Record<string, any> = {}
const nodeMetadata = this.buildNodeMetadata(node)
let cleanupSelfReference: (() => void) | undefined
if (block.metadata?.id === BlockType.APPROVAL) {
cleanupSelfReference = this.preparePauseResumeSelfReference(ctx, node, block, nodeMetadata)
}
try { try {
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block) resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
const output = await handler.execute(ctx, block, resolvedInputs) } finally {
cleanupSelfReference?.()
}
try {
const output = handler.executeWithNode
? await handler.executeWithNode(ctx, block, resolvedInputs, nodeMetadata)
: await handler.execute(ctx, block, resolvedInputs)
const isStreamingExecution = const isStreamingExecution =
output && typeof output === 'object' && 'stream' in output && 'execution' in output output && typeof output === 'object' && 'stream' in output && 'execution' in output
@@ -65,7 +96,7 @@ export class BlockExecutor {
} }
normalizedOutput = this.normalizeOutput( normalizedOutput = this.normalizeOutput(
streamingExec.execution.output || streamingExec.execution streamingExec.execution.output ?? streamingExec.execution
) )
} else { } else {
normalizedOutput = this.normalizeOutput(output) normalizedOutput = this.normalizeOutput(output)
@@ -77,23 +108,20 @@ export class BlockExecutor {
blockLog.endedAt = new Date().toISOString() blockLog.endedAt = new Date().toISOString()
blockLog.durationMs = duration blockLog.durationMs = duration
blockLog.success = true blockLog.success = true
blockLog.output = normalizedOutput blockLog.output = this.filterOutputForLog(block, normalizedOutput)
} }
ctx.blockStates.set(node.id, { this.state.setBlockOutput(node.id, normalizedOutput, duration)
output: normalizedOutput,
executed: true,
executionTime: duration,
})
if (!isSentinel) { if (!isSentinel) {
this.callOnBlockComplete(ctx, node, block, resolvedInputs, normalizedOutput, duration) const filteredOutput = this.filterOutputForLog(block, normalizedOutput)
this.callOnBlockComplete(ctx, node, block, resolvedInputs, filteredOutput, duration)
} }
return normalizedOutput return normalizedOutput
} catch (error) { } catch (error) {
const duration = Date.now() - startTime const duration = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = normalizeError(error)
if (blockLog) { if (blockLog) {
blockLog.endedAt = new Date().toISOString() blockLog.endedAt = new Date().toISOString()
@@ -106,11 +134,7 @@ export class BlockExecutor {
error: errorMessage, error: errorMessage,
} }
ctx.blockStates.set(node.id, { this.state.setBlockOutput(node.id, errorOutput, duration)
output: errorOutput,
executed: true,
executionTime: duration,
})
logger.error('Block execution failed', { logger.error('Block execution failed', {
blockId: node.id, blockId: node.id,
@@ -132,7 +156,39 @@ export class BlockExecutor {
return errorOutput return errorOutput
} }
throw error let errorToThrow: Error | string
if (error instanceof Error) {
errorToThrow = error
} else {
errorToThrow = errorMessage
}
throw buildBlockExecutionError({
block,
error: errorToThrow,
context: ctx,
additionalInfo: {
nodeId: node.id,
executionTime: duration,
},
})
}
}
private buildNodeMetadata(node: DAGNode): {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
} {
const metadata = node?.metadata ?? {}
return {
nodeId: node.id,
loopId: metadata.loopId,
parallelId: metadata.parallelId,
branchIndex: metadata.branchIndex,
branchTotal: metadata.branchTotal,
} }
} }
@@ -155,7 +211,7 @@ export class BlockExecutor {
block: SerializedBlock, block: SerializedBlock,
node: DAGNode node: DAGNode
): BlockLog { ): BlockLog {
let blockName = block.metadata?.name || blockId let blockName = block.metadata?.name ?? blockId
let loopId: string | undefined let loopId: string | undefined
let parallelId: string | undefined let parallelId: string | undefined
let iterationIndex: number | undefined let iterationIndex: number | undefined
@@ -165,24 +221,12 @@ export class BlockExecutor {
blockName = `${blockName} (iteration ${node.metadata.branchIndex})` blockName = `${blockName} (iteration ${node.metadata.branchIndex})`
iterationIndex = node.metadata.branchIndex iterationIndex = node.metadata.branchIndex
parallelId = node.metadata.parallelId parallelId = node.metadata.parallelId
logger.debug('Added parallel iteration suffix', { } else if (node.metadata.isLoopNode && node.metadata.loopId) {
blockId,
parallelId,
branchIndex: node.metadata.branchIndex,
blockName,
})
} else if (node.metadata.isLoopNode && node.metadata.loopId && this.state) {
loopId = node.metadata.loopId loopId = node.metadata.loopId
const loopScope = this.state.getLoopScope(loopId) const loopScope = ctx.loopExecutions?.get(loopId)
if (loopScope && loopScope.iteration !== undefined) { if (loopScope && loopScope.iteration !== undefined) {
blockName = `${blockName} (iteration ${loopScope.iteration})` blockName = `${blockName} (iteration ${loopScope.iteration})`
iterationIndex = loopScope.iteration iterationIndex = loopScope.iteration
logger.debug('Added loop iteration suffix', {
blockId,
loopId,
iteration: loopScope.iteration,
blockName,
})
} else { } else {
logger.warn('Loop scope not found for block', { blockId, loopId }) logger.warn('Loop scope not found for block', { blockId, loopId })
} }
@@ -192,7 +236,7 @@ export class BlockExecutor {
return { return {
blockId, blockId,
blockName, blockName,
blockType: block.metadata?.id || DEFAULTS.BLOCK_TYPE, blockType: block.metadata?.id ?? DEFAULTS.BLOCK_TYPE,
startedAt: new Date().toISOString(), startedAt: new Date().toISOString(),
endedAt: '', endedAt: '',
durationMs: 0, durationMs: 0,
@@ -215,12 +259,28 @@ export class BlockExecutor {
return { result: output } return { result: output }
} }
private filterOutputForLog(
block: SerializedBlock,
output: NormalizedBlockOutput
): NormalizedBlockOutput {
if (block.metadata?.id === BlockType.APPROVAL) {
const filtered: NormalizedBlockOutput = {}
for (const [key, value] of Object.entries(output)) {
if (key.startsWith('_')) continue
if (key === 'response') continue
filtered[key] = value
}
return filtered
}
return output
}
private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void { private callOnBlockStart(ctx: ExecutionContext, node: DAGNode, block: SerializedBlock): void {
const blockId = node.id const blockId = node.id
const blockName = block.metadata?.name || blockId const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id || DEFAULTS.BLOCK_TYPE const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
const iterationContext = this.getIterationContext(node) const iterationContext = this.getIterationContext(ctx, node)
if (this.contextExtensions.onBlockStart) { if (this.contextExtensions.onBlockStart) {
this.contextExtensions.onBlockStart(blockId, blockName, blockType, iterationContext) this.contextExtensions.onBlockStart(blockId, blockName, blockType, iterationContext)
@@ -236,10 +296,10 @@ export class BlockExecutor {
duration: number duration: number
): void { ): void {
const blockId = node.id const blockId = node.id
const blockName = block.metadata?.name || blockId const blockName = block.metadata?.name ?? blockId
const blockType = block.metadata?.id || DEFAULTS.BLOCK_TYPE const blockType = block.metadata?.id ?? DEFAULTS.BLOCK_TYPE
const iterationContext = this.getIterationContext(node) const iterationContext = this.getIterationContext(ctx, node)
if (this.contextExtensions.onBlockComplete) { if (this.contextExtensions.onBlockComplete) {
this.contextExtensions.onBlockComplete( this.contextExtensions.onBlockComplete(
@@ -257,6 +317,7 @@ export class BlockExecutor {
} }
private getIterationContext( private getIterationContext(
ctx: ExecutionContext,
node: DAGNode node: DAGNode
): { iterationCurrent: number; iterationTotal: number; iterationType: SubflowType } | undefined { ): { iterationCurrent: number; iterationTotal: number; iterationType: SubflowType } | undefined {
if (!node?.metadata) return undefined if (!node?.metadata) return undefined
@@ -269,8 +330,8 @@ export class BlockExecutor {
} }
} }
if (node.metadata.isLoopNode && node.metadata.loopId && this.state) { if (node.metadata.isLoopNode && node.metadata.loopId) {
const loopScope = this.state.getLoopScope(node.metadata.loopId) const loopScope = ctx.loopExecutions?.get(node.metadata.loopId)
if (loopScope && loopScope.iteration !== undefined && loopScope.maxIterations) { if (loopScope && loopScope.iteration !== undefined && loopScope.maxIterations) {
return { return {
iterationCurrent: loopScope.iteration, iterationCurrent: loopScope.iteration,
@@ -282,4 +343,74 @@ export class BlockExecutor {
return undefined return undefined
} }
private preparePauseResumeSelfReference(
ctx: ExecutionContext,
node: DAGNode,
block: SerializedBlock,
nodeMetadata: {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
}
): (() => void) | undefined {
const blockId = node.id
const existingState = ctx.blockStates.get(blockId)
if (existingState?.executed) {
return undefined
}
const executionId = ctx.executionId ?? ctx.metadata?.executionId
const workflowId = ctx.workflowId
if (!executionId || !workflowId) {
return undefined
}
const { loopScope } = mapNodeMetadataToPauseScopes(ctx, nodeMetadata)
const contextId = generatePauseContextId(block.id, nodeMetadata, loopScope)
let resumeLinks: { apiUrl: string; uiUrl: string }
try {
const baseUrl = getBaseUrl()
resumeLinks = {
apiUrl: buildResumeApiUrl(baseUrl, workflowId, executionId, contextId),
uiUrl: buildResumeUiUrl(baseUrl, workflowId, executionId),
}
} catch {
resumeLinks = {
apiUrl: buildResumeApiUrl(undefined, workflowId, executionId, contextId),
uiUrl: buildResumeUiUrl(undefined, workflowId, executionId),
}
}
let previousState: BlockState | undefined
if (existingState) {
previousState = { ...existingState }
}
const hadPrevious = existingState !== undefined
const placeholderState: BlockState = {
output: {
uiUrl: resumeLinks.uiUrl,
apiUrl: resumeLinks.apiUrl,
},
executed: false,
executionTime: existingState?.executionTime ?? 0,
}
this.state.setBlockState(blockId, placeholderState)
return () => {
if (hadPrevious && previousState) {
this.state.setBlockState(blockId, previousState)
} else {
this.state.deleteBlockState(blockId)
}
}
}
} }

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { EDGE } from '@/executor/consts' import { EDGE } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { DAGEdge } from '@/executor/dag/types'
import type { NormalizedBlockOutput } from '@/executor/types' import type { NormalizedBlockOutput } from '@/executor/types'
import type { DAG, DAGNode } from '../dag/builder'
import type { DAGEdge } from '../dag/types'
const logger = createLogger('EdgeManager') const logger = createLogger('EdgeManager')
@@ -17,15 +17,9 @@ export class EdgeManager {
skipBackwardsEdge = false skipBackwardsEdge = false
): string[] { ): string[] {
const readyNodes: string[] = [] const readyNodes: string[] = []
logger.debug('Processing outgoing edges', {
nodeId: node.id,
edgeCount: node.outgoingEdges.size,
skipBackwardsEdge,
})
for (const [edgeId, edge] of node.outgoingEdges) { for (const [edgeId, edge] of node.outgoingEdges) {
if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) { if (skipBackwardsEdge && this.isBackwardsEdge(edge.sourceHandle)) {
logger.debug('Skipping backwards edge', { edgeId })
continue continue
} }
@@ -40,14 +34,6 @@ export class EdgeManager {
this.deactivateEdgeAndDescendants(node.id, edge.target, edge.sourceHandle) this.deactivateEdgeAndDescendants(node.id, edge.target, edge.sourceHandle)
} }
logger.debug('Edge not activated', {
edgeId,
sourceHandle: edge.sourceHandle,
from: node.id,
to: edge.target,
isLoopEdge,
deactivatedDescendants: !isLoopEdge,
})
continue continue
} }
@@ -58,14 +44,8 @@ export class EdgeManager {
} }
targetNode.incomingEdges.delete(node.id) targetNode.incomingEdges.delete(node.id)
logger.debug('Removed incoming edge', {
from: node.id,
target: edge.target,
remainingIncomingEdges: targetNode.incomingEdges.size,
})
if (this.isNodeReady(targetNode)) { if (this.isNodeReady(targetNode)) {
logger.debug('Node ready', { nodeId: targetNode.id })
readyNodes.push(targetNode.id) readyNodes.push(targetNode.id)
} }
} }
@@ -80,18 +60,9 @@ export class EdgeManager {
const activeIncomingCount = this.countActiveIncomingEdges(node) const activeIncomingCount = this.countActiveIncomingEdges(node)
if (activeIncomingCount > 0) { if (activeIncomingCount > 0) {
logger.debug('Node not ready - waiting for active incoming edges', {
nodeId: node.id,
totalIncoming: node.incomingEdges.size,
activeIncoming: activeIncomingCount,
})
return false return false
} }
logger.debug('Node ready - all remaining edges are deactivated', {
nodeId: node.id,
totalIncoming: node.incomingEdges.size,
})
return true return true
} }
@@ -103,10 +74,6 @@ export class EdgeManager {
} }
targetNode.incomingEdges.add(sourceNodeId) targetNode.incomingEdges.add(sourceNodeId)
logger.debug('Restored incoming edge', {
from: sourceNodeId,
to: targetNodeId,
})
} }
clearDeactivatedEdges(): void { clearDeactivatedEdges(): void {
@@ -116,34 +83,38 @@ export class EdgeManager {
private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean { private shouldActivateEdge(edge: DAGEdge, output: NormalizedBlockOutput): boolean {
const handle = edge.sourceHandle const handle = edge.sourceHandle
if (handle?.startsWith(EDGE.CONDITION_PREFIX)) { if (!handle) {
return true
}
if (handle.startsWith(EDGE.CONDITION_PREFIX)) {
const conditionValue = handle.substring(EDGE.CONDITION_PREFIX.length) const conditionValue = handle.substring(EDGE.CONDITION_PREFIX.length)
return output.selectedOption === conditionValue return output.selectedOption === conditionValue
} }
if (handle?.startsWith(EDGE.ROUTER_PREFIX)) { if (handle.startsWith(EDGE.ROUTER_PREFIX)) {
const routeId = handle.substring(EDGE.ROUTER_PREFIX.length) const routeId = handle.substring(EDGE.ROUTER_PREFIX.length)
return output.selectedRoute === routeId return output.selectedRoute === routeId
} }
if (handle === EDGE.LOOP_CONTINUE || handle === EDGE.LOOP_CONTINUE_ALT) { switch (handle) {
case EDGE.LOOP_CONTINUE:
case EDGE.LOOP_CONTINUE_ALT:
return output.selectedRoute === EDGE.LOOP_CONTINUE return output.selectedRoute === EDGE.LOOP_CONTINUE
}
if (handle === EDGE.LOOP_EXIT) { case EDGE.LOOP_EXIT:
return output.selectedRoute === EDGE.LOOP_EXIT return output.selectedRoute === EDGE.LOOP_EXIT
}
if (handle === EDGE.ERROR && !output.error) { case EDGE.ERROR:
return false return !!output.error
}
if (handle === EDGE.SOURCE && output.error) { case EDGE.SOURCE:
return false return !output.error
}
default:
return true return true
} }
}
private isBackwardsEdge(sourceHandle?: string): boolean { private isBackwardsEdge(sourceHandle?: string): boolean {
return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT return sourceHandle === EDGE.LOOP_CONTINUE || sourceHandle === EDGE.LOOP_CONTINUE_ALT
@@ -165,7 +136,6 @@ export class EdgeManager {
const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, sourceId) const hasOtherActiveIncoming = this.hasActiveIncomingEdges(targetNode, sourceId)
if (!hasOtherActiveIncoming) { if (!hasOtherActiveIncoming) {
logger.debug('Deactivating descendants of unreachable node', { nodeId: targetId })
for (const [_, outgoingEdge] of targetNode.outgoingEdges) { for (const [_, outgoingEdge] of targetNode.outgoingEdges) {
this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle) this.deactivateEdgeAndDescendants(targetId, outgoingEdge.target, outgoingEdge.sourceHandle)
} }
@@ -218,6 +188,6 @@ export class EdgeManager {
} }
private createEdgeKey(sourceId: string, targetId: string, sourceHandle?: string): string { private createEdgeKey(sourceId: string, targetId: string, sourceHandle?: string): string {
return `${sourceId}-${targetId}-${sourceHandle || EDGE.DEFAULT}` return `${sourceId}-${targetId}-${sourceHandle ?? EDGE.DEFAULT}`
} }
} }

View File

@@ -1,9 +1,18 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { BlockType } from '@/executor/consts' import { BlockType } from '@/executor/consts'
import type { ExecutionContext, ExecutionResult, NormalizedBlockOutput } from '@/executor/types' import type { DAG } from '@/executor/dag/builder'
import type { DAG } from '../dag/builder' import type { EdgeManager } from '@/executor/execution/edge-manager'
import type { NodeExecutionOrchestrator } from '../orchestrators/node' import { serializePauseSnapshot } from '@/executor/execution/snapshot-serializer'
import type { EdgeManager } from './edge-manager' import type { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import type {
ExecutionContext,
ExecutionResult,
NormalizedBlockOutput,
PauseMetadata,
PausePoint,
ResumeStatus,
} from '@/executor/types'
import { normalizeError } from '@/executor/utils/errors'
const logger = createLogger('ExecutionEngine') const logger = createLogger('ExecutionEngine')
@@ -12,32 +21,32 @@ export class ExecutionEngine {
private executing = new Set<Promise<void>>() private executing = new Set<Promise<void>>()
private queueLock = Promise.resolve() private queueLock = Promise.resolve()
private finalOutput: NormalizedBlockOutput = {} private finalOutput: NormalizedBlockOutput = {}
private pausedBlocks: Map<string, PauseMetadata> = new Map()
private allowResumeTriggers: boolean
constructor( constructor(
private context: ExecutionContext,
private dag: DAG, private dag: DAG,
private edgeManager: EdgeManager, private edgeManager: EdgeManager,
private nodeOrchestrator: NodeExecutionOrchestrator, private nodeOrchestrator: NodeExecutionOrchestrator
private context: ExecutionContext ) {
) {} this.allowResumeTriggers = this.context.metadata.resumeFromSnapshot === true
}
async run(triggerBlockId?: string): Promise<ExecutionResult> { async run(triggerBlockId?: string): Promise<ExecutionResult> {
const startTime = Date.now() const startTime = Date.now()
try { try {
this.initializeQueue(triggerBlockId) this.initializeQueue(triggerBlockId)
logger.debug('Starting execution loop', {
initialQueueSize: this.readyQueue.length,
startNodeId: triggerBlockId,
})
while (this.hasWork()) { while (this.hasWork()) {
await this.processQueue() await this.processQueue()
} }
logger.debug('Execution loop completed', {
finalOutputKeys: Object.keys(this.finalOutput),
})
await this.waitForAllExecutions() await this.waitForAllExecutions()
if (this.pausedBlocks.size > 0) {
return this.buildPausedResult(startTime)
}
const endTime = Date.now() const endTime = Date.now()
this.context.metadata.endTime = new Date(endTime).toISOString() this.context.metadata.endTime = new Date(endTime).toISOString()
this.context.metadata.duration = endTime - startTime this.context.metadata.duration = endTime - startTime
@@ -53,7 +62,7 @@ export class ExecutionEngine {
this.context.metadata.endTime = new Date(endTime).toISOString() this.context.metadata.endTime = new Date(endTime).toISOString()
this.context.metadata.duration = endTime - startTime this.context.metadata.duration = endTime - startTime
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = normalizeError(error)
logger.error('Execution failed', { error: errorMessage }) logger.error('Execution failed', { error: errorMessage })
const executionResult: ExecutionResult = { const executionResult: ExecutionResult = {
@@ -74,9 +83,13 @@ export class ExecutionEngine {
} }
private addToQueue(nodeId: string): void { private addToQueue(nodeId: string): void {
const node = this.dag.nodes.get(nodeId)
if (node?.metadata?.isResumeTrigger && !this.allowResumeTriggers) {
return
}
if (!this.readyQueue.includes(nodeId)) { if (!this.readyQueue.includes(nodeId)) {
this.readyQueue.push(nodeId) this.readyQueue.push(nodeId)
logger.debug('Added to queue', { nodeId, queueLength: this.readyQueue.length })
} }
} }
@@ -122,6 +135,56 @@ export class ExecutionEngine {
} }
private initializeQueue(triggerBlockId?: string): void { private initializeQueue(triggerBlockId?: string): void {
const pendingBlocks = this.context.metadata.pendingBlocks
const remainingEdges = (this.context.metadata as any).remainingEdges
if (remainingEdges && Array.isArray(remainingEdges) && remainingEdges.length > 0) {
logger.info('Removing edges from resumed pause blocks', {
edgeCount: remainingEdges.length,
edges: remainingEdges,
})
for (const edge of remainingEdges) {
const targetNode = this.dag.nodes.get(edge.target)
if (targetNode) {
const hadEdge = targetNode.incomingEdges.has(edge.source)
targetNode.incomingEdges.delete(edge.source)
if (this.edgeManager.isNodeReady(targetNode)) {
logger.info('Node became ready after edge removal', { nodeId: targetNode.id })
this.addToQueue(targetNode.id)
}
}
}
logger.info('Edge removal complete, queued ready nodes', {
queueLength: this.readyQueue.length,
queuedNodes: this.readyQueue,
})
return
}
if (pendingBlocks && pendingBlocks.length > 0) {
logger.info('Initializing queue from pending blocks (resume mode)', {
pendingBlocks,
allowResumeTriggers: this.allowResumeTriggers,
dagNodeCount: this.dag.nodes.size,
})
for (const nodeId of pendingBlocks) {
this.addToQueue(nodeId)
}
logger.info('Pending blocks queued', {
queueLength: this.readyQueue.length,
queuedNodes: this.readyQueue,
})
this.context.metadata.pendingBlocks = []
return
}
if (triggerBlockId) { if (triggerBlockId) {
this.addToQueue(triggerBlockId) this.addToQueue(triggerBlockId)
return return
@@ -155,18 +218,17 @@ export class ExecutionEngine {
private async executeNodeAsync(nodeId: string): Promise<void> { private async executeNodeAsync(nodeId: string): Promise<void> {
try { try {
const wasAlreadyExecuted = this.context.executedBlocks.has(nodeId) const wasAlreadyExecuted = this.context.executedBlocks.has(nodeId)
const result = await this.nodeOrchestrator.executeNode(nodeId, this.context) const node = this.dag.nodes.get(nodeId)
const result = await this.nodeOrchestrator.executeNode(this.context, nodeId)
if (!wasAlreadyExecuted) { if (!wasAlreadyExecuted) {
await this.withQueueLock(async () => { await this.withQueueLock(async () => {
await this.handleNodeCompletion(nodeId, result.output, result.isFinalOutput) await this.handleNodeCompletion(nodeId, result.output, result.isFinalOutput)
}) })
} else {
logger.debug('Node was already executed, skipping edge processing to avoid loops', {
nodeId,
})
} }
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error) const errorMessage = normalizeError(error)
logger.error('Node execution failed', { nodeId, error: errorMessage }) logger.error('Node execution failed', { nodeId, error: errorMessage })
throw error throw error
} }
@@ -183,19 +245,73 @@ export class ExecutionEngine {
return return
} }
await this.nodeOrchestrator.handleNodeCompletion(nodeId, output, this.context) if (output._pauseMetadata) {
const pauseMetadata = output._pauseMetadata
this.pausedBlocks.set(pauseMetadata.contextId, pauseMetadata)
this.context.metadata.status = 'paused'
this.context.metadata.pausePoints = Array.from(this.pausedBlocks.keys())
return
}
await this.nodeOrchestrator.handleNodeCompletion(this.context, nodeId, output)
if (isFinalOutput) { if (isFinalOutput) {
this.finalOutput = output this.finalOutput = output
} }
const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false) const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false)
this.addMultipleToQueue(readyNodes)
logger.debug('Node completion handled', { logger.info('Processing outgoing edges', {
nodeId, nodeId,
outgoingEdgesCount: node.outgoingEdges.size,
readyNodesCount: readyNodes.length, readyNodesCount: readyNodes.length,
queueSize: this.readyQueue.length, readyNodes,
}) })
this.addMultipleToQueue(readyNodes)
}
private buildPausedResult(startTime: number): ExecutionResult {
const endTime = Date.now()
this.context.metadata.endTime = new Date(endTime).toISOString()
this.context.metadata.duration = endTime - startTime
this.context.metadata.status = 'paused'
const snapshotSeed = serializePauseSnapshot(this.context, [], this.dag)
const pausePoints: PausePoint[] = Array.from(this.pausedBlocks.values()).map((pause) => ({
contextId: pause.contextId,
blockId: pause.blockId,
response: pause.response,
registeredAt: pause.timestamp,
resumeStatus: 'paused' as ResumeStatus,
snapshotReady: true,
parallelScope: pause.parallelScope,
loopScope: pause.loopScope,
resumeLinks: pause.resumeLinks,
}))
return {
success: true,
output: this.collectPauseResponses(),
logs: this.context.blockLogs,
metadata: this.context.metadata,
status: 'paused',
pausePoints,
snapshotSeed,
}
}
private collectPauseResponses(): NormalizedBlockOutput {
const responses = Array.from(this.pausedBlocks.values()).map((pause) => pause.response)
if (responses.length === 1) {
return responses[0]
}
return {
pausedBlocks: responses,
pauseCount: responses.length,
}
} }
} }

View File

@@ -1,23 +1,24 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { StartBlockPath } from '@/lib/workflows/triggers'
import type { BlockOutput } from '@/blocks/types' import type { BlockOutput } from '@/blocks/types'
import { DAGBuilder } from '@/executor/dag/builder'
import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager'
import { ExecutionEngine } from '@/executor/execution/engine'
import { ExecutionState } from '@/executor/execution/state'
import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
import { createBlockHandlers } from '@/executor/handlers/registry' import { createBlockHandlers } from '@/executor/handlers/registry'
import type { ExecutionContext, ExecutionResult } from '@/executor/types' import { LoopOrchestrator } from '@/executor/orchestrators/loop'
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
import { import {
buildResolutionFromBlock, buildResolutionFromBlock,
buildStartBlockOutput, buildStartBlockOutput,
resolveExecutorStartBlock, resolveExecutorStartBlock,
} from '@/executor/utils/start-block' } from '@/executor/utils/start-block'
import { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedWorkflow } from '@/serializer/types' import type { SerializedWorkflow } from '@/serializer/types'
import { DAGBuilder } from '../dag/builder'
import { LoopOrchestrator } from '../orchestrators/loop'
import { NodeExecutionOrchestrator } from '../orchestrators/node'
import { ParallelOrchestrator } from '../orchestrators/parallel'
import { VariableResolver } from '../variables/resolver'
import { BlockExecutor } from './block-executor'
import { EdgeManager } from './edge-manager'
import { ExecutionEngine } from './engine'
import { ExecutionState } from './state'
import type { ContextExtensions, WorkflowInput } from './types'
const logger = createLogger('DAGExecutor') const logger = createLogger('DAGExecutor')
@@ -32,7 +33,6 @@ export interface DAGExecutorOptions {
export class DAGExecutor { export class DAGExecutor {
private workflow: SerializedWorkflow private workflow: SerializedWorkflow
private initialBlockStates: Record<string, BlockOutput>
private environmentVariables: Record<string, string> private environmentVariables: Record<string, string>
private workflowInput: WorkflowInput private workflowInput: WorkflowInput
private workflowVariables: Record<string, unknown> private workflowVariables: Record<string, unknown>
@@ -42,19 +42,25 @@ export class DAGExecutor {
constructor(options: DAGExecutorOptions) { constructor(options: DAGExecutorOptions) {
this.workflow = options.workflow this.workflow = options.workflow
this.initialBlockStates = options.currentBlockStates || {} this.environmentVariables = options.envVarValues ?? {}
this.environmentVariables = options.envVarValues || {} this.workflowInput = options.workflowInput ?? {}
this.workflowInput = options.workflowInput || {} this.workflowVariables = options.workflowVariables ?? {}
this.workflowVariables = options.workflowVariables || {} this.contextExtensions = options.contextExtensions ?? {}
this.contextExtensions = options.contextExtensions || {}
this.dagBuilder = new DAGBuilder() this.dagBuilder = new DAGBuilder()
} }
async execute(workflowId: string, triggerBlockId?: string): Promise<ExecutionResult> { async execute(workflowId: string, triggerBlockId?: string): Promise<ExecutionResult> {
const dag = this.dagBuilder.build(this.workflow, triggerBlockId) const savedIncomingEdges = this.contextExtensions.dagIncomingEdges
const context = this.createExecutionContext(workflowId, triggerBlockId) const dag = this.dagBuilder.build(this.workflow, triggerBlockId, savedIncomingEdges)
// Create state with shared references to context's maps/sets for single source of truth const { context, state } = this.createExecutionContext(workflowId, triggerBlockId)
const state = new ExecutionState(context.blockStates, context.executedBlocks)
// Link cancellation flag to context
Object.defineProperty(context, 'isCancelled', {
get: () => this.isCancelled,
enumerable: true,
configurable: true,
})
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state) const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver) const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
const parallelOrchestrator = new ParallelOrchestrator(dag, state) const parallelOrchestrator = new ParallelOrchestrator(dag, state)
@@ -68,7 +74,7 @@ export class DAGExecutor {
loopOrchestrator, loopOrchestrator,
parallelOrchestrator parallelOrchestrator
) )
const engine = new ExecutionEngine(dag, edgeManager, nodeOrchestrator, context) const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
return await engine.run(triggerBlockId) return await engine.run(triggerBlockId)
} }
@@ -77,14 +83,14 @@ export class DAGExecutor {
} }
async continueExecution( async continueExecution(
pendingBlocks: string[], _pendingBlocks: string[],
context: ExecutionContext context: ExecutionContext
): Promise<ExecutionResult> { ): Promise<ExecutionResult> {
logger.warn('Debug mode (continueExecution) is not yet implemented in the refactored executor') logger.warn('Debug mode (continueExecution) is not yet implemented in the refactored executor')
return { return {
success: false, success: false,
output: {}, output: {},
logs: context.blockLogs || [], logs: context.blockLogs ?? [],
error: 'Debug mode is not yet supported in the refactored executor', error: 'Debug mode is not yet supported in the refactored executor',
metadata: { metadata: {
duration: 0, duration: 0,
@@ -93,44 +99,118 @@ export class DAGExecutor {
} }
} }
private createExecutionContext(workflowId: string, triggerBlockId?: string): ExecutionContext { private createExecutionContext(
workflowId: string,
triggerBlockId?: string
): { context: ExecutionContext; state: ExecutionState } {
const snapshotState = this.contextExtensions.snapshotState
const blockStates = snapshotState?.blockStates
? new Map(Object.entries(snapshotState.blockStates))
: new Map<string, BlockState>()
const executedBlocks = snapshotState?.executedBlocks
? new Set(snapshotState.executedBlocks)
: new Set<string>()
const state = new ExecutionState(blockStates, executedBlocks)
const context: ExecutionContext = { const context: ExecutionContext = {
workflowId, workflowId,
workspaceId: this.contextExtensions.workspaceId, workspaceId: this.contextExtensions.workspaceId,
executionId: this.contextExtensions.executionId, executionId: this.contextExtensions.executionId,
userId: this.contextExtensions.userId, userId: this.contextExtensions.userId,
isDeployedContext: this.contextExtensions.isDeployedContext, isDeployedContext: this.contextExtensions.isDeployedContext,
blockStates: new Map(), blockStates: state.getBlockStates(),
blockLogs: [], blockLogs: snapshotState?.blockLogs ?? [],
metadata: { metadata: {
startTime: new Date().toISOString(), startTime: new Date().toISOString(),
duration: 0, duration: 0,
useDraftState: this.contextExtensions.isDeployedContext !== true,
}, },
environmentVariables: this.environmentVariables, environmentVariables: this.environmentVariables,
workflowVariables: this.workflowVariables, workflowVariables: this.workflowVariables,
decisions: { decisions: {
router: new Map(), router: snapshotState?.decisions?.router
condition: new Map(), ? new Map(Object.entries(snapshotState.decisions.router))
: new Map(),
condition: snapshotState?.decisions?.condition
? new Map(Object.entries(snapshotState.decisions.condition))
: new Map(),
}, },
loopIterations: new Map(), completedLoops: snapshotState?.completedLoops
loopItems: new Map(), ? new Set(snapshotState.completedLoops)
completedLoops: new Set(), : new Set(),
executedBlocks: new Set(), loopExecutions: snapshotState?.loopExecutions
activeExecutionPath: new Set(), ? new Map(
Object.entries(snapshotState.loopExecutions).map(([loopId, scope]) => [
loopId,
{
...scope,
currentIterationOutputs: scope.currentIterationOutputs
? new Map(Object.entries(scope.currentIterationOutputs))
: new Map(),
},
])
)
: new Map(),
parallelExecutions: snapshotState?.parallelExecutions
? new Map(
Object.entries(snapshotState.parallelExecutions).map(([parallelId, scope]) => [
parallelId,
{
...scope,
branchOutputs: scope.branchOutputs
? new Map(Object.entries(scope.branchOutputs).map(([k, v]) => [Number(k), v]))
: new Map(),
},
])
)
: new Map(),
executedBlocks: state.getExecutedBlocks(),
activeExecutionPath: snapshotState?.activeExecutionPath
? new Set(snapshotState.activeExecutionPath)
: new Set(),
workflow: this.workflow, workflow: this.workflow,
stream: this.contextExtensions.stream || false, stream: this.contextExtensions.stream ?? false,
selectedOutputs: this.contextExtensions.selectedOutputs || [], selectedOutputs: this.contextExtensions.selectedOutputs ?? [],
edges: this.contextExtensions.edges || [], edges: this.contextExtensions.edges ?? [],
onStream: this.contextExtensions.onStream, onStream: this.contextExtensions.onStream,
onBlockStart: this.contextExtensions.onBlockStart, onBlockStart: this.contextExtensions.onBlockStart,
onBlockComplete: this.contextExtensions.onBlockComplete, onBlockComplete: this.contextExtensions.onBlockComplete,
} }
this.initializeStarterBlock(context, triggerBlockId) if (this.contextExtensions.resumeFromSnapshot) {
return context context.metadata.resumeFromSnapshot = true
logger.info('Resume from snapshot enabled', {
resumePendingQueue: this.contextExtensions.resumePendingQueue,
remainingEdges: this.contextExtensions.remainingEdges,
triggerBlockId,
})
} }
private initializeStarterBlock(context: ExecutionContext, triggerBlockId?: string): void { if (this.contextExtensions.remainingEdges) {
;(context.metadata as any).remainingEdges = this.contextExtensions.remainingEdges
logger.info('Set remaining edges for resume', {
edgeCount: this.contextExtensions.remainingEdges.length,
})
}
if (this.contextExtensions.resumePendingQueue?.length) {
context.metadata.pendingBlocks = [...this.contextExtensions.resumePendingQueue]
logger.info('Set pending blocks from resume queue', {
pendingBlocks: context.metadata.pendingBlocks,
skipStarterBlockInit: true,
})
} else {
this.initializeStarterBlock(context, state, triggerBlockId)
}
return { context, state }
}
private initializeStarterBlock(
context: ExecutionContext,
state: ExecutionState,
triggerBlockId?: string
): void {
let startResolution: ReturnType<typeof resolveExecutorStartBlock> | null = null let startResolution: ReturnType<typeof resolveExecutorStartBlock> | null = null
if (triggerBlockId) { if (triggerBlockId) {
@@ -145,14 +225,10 @@ export class DAGExecutor {
startResolution = buildResolutionFromBlock(triggerBlock) startResolution = buildResolutionFromBlock(triggerBlock)
if (!startResolution) { if (!startResolution) {
logger.debug('Creating generic resolution for trigger block', {
triggerBlockId,
blockType: triggerBlock.metadata?.id,
})
startResolution = { startResolution = {
blockId: triggerBlock.id, blockId: triggerBlock.id,
block: triggerBlock, block: triggerBlock,
path: 'split_manual' as any, path: StartBlockPath.SPLIT_MANUAL,
} }
} }
} else { } else {
@@ -167,21 +243,20 @@ export class DAGExecutor {
} }
} }
if (state.getBlockStates().has(startResolution.block.id)) {
return
}
const blockOutput = buildStartBlockOutput({ const blockOutput = buildStartBlockOutput({
resolution: startResolution, resolution: startResolution,
workflowInput: this.workflowInput, workflowInput: this.workflowInput,
isDeployedExecution: this.contextExtensions?.isDeployedContext === true, isDeployedExecution: this.contextExtensions?.isDeployedContext === true,
}) })
context.blockStates.set(startResolution.block.id, { state.setBlockState(startResolution.block.id, {
output: blockOutput, output: blockOutput,
executed: true, executed: false,
executionTime: 0, executionTime: 0,
}) })
logger.debug('Initialized start block', {
blockId: startResolution.block.id,
blockType: startResolution.block.metadata?.id,
})
} }
} }

View File

@@ -0,0 +1,129 @@
import type { DAG } from '@/executor/dag/builder'
import type { SerializableExecutionState } from '@/executor/execution/snapshot'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionContext, ExecutionMetadata, SerializedSnapshot } from '@/executor/types'
function mapFromEntries<T>(map?: Map<string, T>): Record<string, T> | undefined {
if (!map) return undefined
return Object.fromEntries(map)
}
function setToArray<T>(set?: Set<T>): T[] | undefined {
if (!set) return undefined
return Array.from(set)
}
function serializeLoopExecutions(
loopExecutions?: Map<string, any>
): Record<string, any> | undefined {
if (!loopExecutions) return undefined
const result: Record<string, any> = {}
for (const [loopId, scope] of loopExecutions.entries()) {
let currentIterationOutputs: any
if (scope.currentIterationOutputs instanceof Map) {
currentIterationOutputs = Object.fromEntries(scope.currentIterationOutputs)
} else {
currentIterationOutputs = scope.currentIterationOutputs ?? {}
}
result[loopId] = {
...scope,
currentIterationOutputs,
}
}
return result
}
function serializeParallelExecutions(
parallelExecutions?: Map<string, any>
): Record<string, any> | undefined {
if (!parallelExecutions) return undefined
const result: Record<string, any> = {}
for (const [parallelId, scope] of parallelExecutions.entries()) {
let branchOutputs: any
if (scope.branchOutputs instanceof Map) {
branchOutputs = Object.fromEntries(scope.branchOutputs)
} else {
branchOutputs = scope.branchOutputs ?? {}
}
result[parallelId] = {
...scope,
branchOutputs,
}
}
return result
}
export function serializePauseSnapshot(
context: ExecutionContext,
triggerBlockIds: string[],
dag?: DAG
): SerializedSnapshot {
const metadataFromContext = context.metadata as ExecutionMetadata | undefined
let useDraftState: boolean
if (metadataFromContext?.useDraftState !== undefined) {
useDraftState = metadataFromContext.useDraftState
} else if (context.isDeployedContext === true) {
useDraftState = false
} else {
useDraftState = true
}
const dagIncomingEdges: Record<string, string[]> | undefined = dag
? Object.fromEntries(
Array.from(dag.nodes.entries()).map(([nodeId, node]) => [
nodeId,
Array.from(node.incomingEdges),
])
)
: undefined
const state: SerializableExecutionState = {
blockStates: Object.fromEntries(context.blockStates),
executedBlocks: Array.from(context.executedBlocks),
blockLogs: context.blockLogs,
decisions: {
router: Object.fromEntries(context.decisions.router),
condition: Object.fromEntries(context.decisions.condition),
},
completedLoops: Array.from(context.completedLoops),
loopExecutions: serializeLoopExecutions(context.loopExecutions),
parallelExecutions: serializeParallelExecutions(context.parallelExecutions),
parallelBlockMapping: mapFromEntries(context.parallelBlockMapping),
activeExecutionPath: Array.from(context.activeExecutionPath),
pendingQueue: triggerBlockIds,
dagIncomingEdges,
}
const executionMetadata = {
requestId:
(context.metadata as any)?.requestId ??
context.executionId ??
context.workflowId ??
'unknown',
executionId: context.executionId ?? 'unknown',
workflowId: context.workflowId,
workspaceId: context.workspaceId,
userId: (context.metadata as any)?.userId ?? '',
triggerType: (context.metadata as any)?.triggerType ?? 'manual',
triggerBlockId: triggerBlockIds[0],
useDraftState,
startTime: context.metadata.startTime ?? new Date().toISOString(),
}
const snapshot = new ExecutionSnapshot(
executionMetadata,
context.workflow,
{},
context.environmentVariables ?? {},
context.workflowVariables ?? {},
context.selectedOutputs ?? [],
state
)
return {
snapshot: snapshot.toJSON(),
triggerIds: triggerBlockIds,
}
}

View File

@@ -11,6 +11,8 @@ export interface ExecutionMetadata {
triggerBlockId?: string triggerBlockId?: string
useDraftState: boolean useDraftState: boolean
startTime: string startTime: string
pendingBlocks?: string[]
resumeFromSnapshot?: boolean
} }
export interface ExecutionCallbacks { export interface ExecutionCallbacks {
@@ -33,8 +35,6 @@ export interface SerializableExecutionState {
router: Record<string, string> router: Record<string, string>
condition: Record<string, string> condition: Record<string, string>
} }
loopIterations: Record<string, number>
loopItems: Record<string, any>
completedLoops: string[] completedLoops: string[]
loopExecutions?: Record<string, any> loopExecutions?: Record<string, any>
parallelExecutions?: Record<string, any> parallelExecutions?: Record<string, any>
@@ -42,6 +42,8 @@ export interface SerializableExecutionState {
activeExecutionPath: string[] activeExecutionPath: string[]
pendingQueue?: string[] pendingQueue?: string[]
remainingEdges?: Edge[] remainingEdges?: Edge[]
dagIncomingEdges?: Record<string, string[]>
completedPauseContexts?: string[]
} }
export class ExecutionSnapshot { export class ExecutionSnapshot {
@@ -80,19 +82,3 @@ export class ExecutionSnapshot {
) )
} }
} }
// TODO: Implement pause/resume functionality
//
// Future implementation should include:
// 1. executor.pause() - Captures current state mid-execution
// - Serialize ExecutionContext (blockStates, decisions, loops, etc) to state property
// - Save snapshot.toJSON() to database
// 2. executor.resume(snapshot) - Reconstructs execution from saved state
// - Load snapshot from database
// - Restore ExecutionContext from state property
// - Continue execution from pendingQueue
// 3. API endpoints:
// - POST /api/executions/[id]/pause
// - POST /api/executions/[id]/resume
// 4. Database schema:
// - execution_snapshots table with snapshot JSON column

View File

@@ -1,4 +1,9 @@
import type { NormalizedBlockOutput } from '@/executor/types' import type { BlockStateController } from '@/executor/execution/types'
import type { BlockState, NormalizedBlockOutput } from '@/executor/types'
function normalizeLookupId(id: string): string {
return id.replace(/\d+/gu, '').replace(/_loop\d+/g, '')
}
export interface LoopScope { export interface LoopScope {
iteration: number iteration: number
currentIterationOutputs: Map<string, NormalizedBlockOutput> currentIterationOutputs: Map<string, NormalizedBlockOutput>
@@ -18,53 +23,77 @@ export interface ParallelScope {
totalExpectedNodes: number totalExpectedNodes: number
} }
export class ExecutionState { export class ExecutionState implements BlockStateController {
// Shared references with ExecutionContext for single source of truth private readonly blockStates: Map<string, BlockState>
readonly blockStates: Map< private readonly executedBlocks: Set<string>
string,
{ output: NormalizedBlockOutput; executed: boolean; executionTime: number }
>
readonly executedBlocks: Set<string>
readonly loopScopes = new Map<string, LoopScope>()
readonly parallelScopes = new Map<string, ParallelScope>()
constructor( constructor(blockStates?: Map<string, BlockState>, executedBlocks?: Set<string>) {
blockStates: Map< this.blockStates = blockStates ?? new Map()
string, this.executedBlocks = executedBlocks ?? new Set()
{ output: NormalizedBlockOutput; executed: boolean; executionTime: number }
>,
executedBlocks: Set<string>
) {
this.blockStates = blockStates
this.executedBlocks = executedBlocks
} }
getBlockOutput(blockId: string): NormalizedBlockOutput | undefined { getBlockStates(): ReadonlyMap<string, BlockState> {
return this.blockStates.get(blockId)?.output return this.blockStates
} }
setBlockOutput(blockId: string, output: NormalizedBlockOutput): void { getExecutedBlocks(): ReadonlySet<string> {
this.blockStates.set(blockId, { output, executed: true, executionTime: 0 }) return this.executedBlocks
}
getBlockOutput(blockId: string, currentNodeId?: string): NormalizedBlockOutput | undefined {
const direct = this.blockStates.get(blockId)?.output
if (direct !== undefined) {
return direct
}
const normalizedId = normalizeLookupId(blockId)
if (normalizedId !== blockId) {
return undefined
}
if (currentNodeId) {
const currentSuffix = currentNodeId.replace(normalizedId, '').match(/₍\d+₎/g)?.[0] ?? ''
const loopSuffix = currentNodeId.match(/_loop\d+/)?.[0] ?? ''
const withSuffix = `${blockId}${currentSuffix}${loopSuffix}`
const suffixedOutput = this.blockStates.get(withSuffix)?.output
if (suffixedOutput !== undefined) {
return suffixedOutput
}
}
for (const [storedId, state] of this.blockStates.entries()) {
if (normalizeLookupId(storedId) === blockId) {
return state.output
}
}
return undefined
}
setBlockOutput(blockId: string, output: NormalizedBlockOutput, executionTime = 0): void {
this.blockStates.set(blockId, { output, executed: true, executionTime })
this.executedBlocks.add(blockId) this.executedBlocks.add(blockId)
} }
setBlockState(blockId: string, state: BlockState): void {
this.blockStates.set(blockId, state)
if (state.executed) {
this.executedBlocks.add(blockId)
} else {
this.executedBlocks.delete(blockId)
}
}
deleteBlockState(blockId: string): void {
this.blockStates.delete(blockId)
this.executedBlocks.delete(blockId)
}
unmarkExecuted(blockId: string): void {
this.executedBlocks.delete(blockId)
}
hasExecuted(blockId: string): boolean { hasExecuted(blockId: string): boolean {
return this.executedBlocks.has(blockId) return this.executedBlocks.has(blockId)
} }
getLoopScope(loopId: string): LoopScope | undefined {
return this.loopScopes.get(loopId)
}
setLoopScope(loopId: string, scope: LoopScope): void {
this.loopScopes.set(loopId, scope)
}
getParallelScope(parallelId: string): ParallelScope | undefined {
return this.parallelScopes.get(parallelId)
}
setParallelScope(parallelId: string, scope: ParallelScope): void {
this.parallelScopes.set(parallelId, scope)
}
} }

View File

@@ -1,4 +1,4 @@
import type { NormalizedBlockOutput } from '@/executor/types' import type { BlockState, NormalizedBlockOutput } from '@/executor/types'
import type { SubflowType } from '@/stores/workflows/workflow/types' import type { SubflowType } from '@/stores/workflows/workflow/types'
export interface ContextExtensions { export interface ContextExtensions {
@@ -10,6 +10,16 @@ export interface ContextExtensions {
edges?: Array<{ source: string; target: string }> edges?: Array<{ source: string; target: string }>
isDeployedContext?: boolean isDeployedContext?: boolean
isChildExecution?: boolean isChildExecution?: boolean
resumeFromSnapshot?: boolean
resumePendingQueue?: string[]
remainingEdges?: Array<{
source: string
target: string
sourceHandle?: string
targetHandle?: string
}>
dagIncomingEdges?: Record<string, string[]>
snapshotState?: import('@/executor/execution/snapshot').SerializableExecutionState
onStream?: (streamingExecution: unknown) => Promise<void> onStream?: (streamingExecution: unknown) => Promise<void>
onBlockStart?: ( onBlockStart?: (
blockId: string, blockId: string,
@@ -37,3 +47,17 @@ export interface ContextExtensions {
export interface WorkflowInput { export interface WorkflowInput {
[key: string]: unknown [key: string]: unknown
} }
export interface BlockStateReader {
getBlockOutput(blockId: string, currentNodeId?: string): NormalizedBlockOutput | undefined
hasExecuted(blockId: string): boolean
}
export interface BlockStateWriter {
setBlockOutput(blockId: string, output: NormalizedBlockOutput, executionTime?: number): void
setBlockState(blockId: string, state: BlockState): void
deleteBlockState(blockId: string): void
unmarkExecuted(blockId: string): void
}
export type BlockStateController = BlockStateReader & BlockStateWriter

View File

@@ -109,8 +109,7 @@ describe('AgentBlockHandler', () => {
metadata: { startTime: new Date().toISOString(), duration: 0 }, metadata: { startTime: new Date().toISOString(), duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
completedLoops: new Set(), completedLoops: new Set(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),

View File

@@ -34,8 +34,6 @@ export class AgentBlockHandler implements BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: AgentInputs inputs: AgentInputs
): Promise<BlockOutput | StreamingExecution> { ): Promise<BlockOutput | StreamingExecution> {
logger.info(`Executing agent block: ${block.id}`)
const responseFormat = this.parseResponseFormat(inputs.responseFormat) const responseFormat = this.parseResponseFormat(inputs.responseFormat)
const model = inputs.model || AGENT.DEFAULT_MODEL const model = inputs.model || AGENT.DEFAULT_MODEL
const providerId = getProviderFromModel(model) const providerId = getProviderFromModel(model)
@@ -76,9 +74,6 @@ export class AgentBlockHandler implements BlockHandler {
const trimmedValue = responseFormat.trim() const trimmedValue = responseFormat.trim()
if (trimmedValue.startsWith('<') && trimmedValue.includes('>')) { if (trimmedValue.startsWith('<') && trimmedValue.includes('>')) {
logger.info('Response format contains variable reference:', {
value: trimmedValue,
})
return undefined return undefined
} }
@@ -163,10 +158,8 @@ export class AgentBlockHandler implements BlockHandler {
if (tool.code) { if (tool.code) {
base.executeFunction = async (callParams: Record<string, any>) => { base.executeFunction = async (callParams: Record<string, any>) => {
// Merge user-provided parameters with LLM-generated parameters
const mergedParams = mergeToolParameters(userProvidedParams, callParams) const mergedParams = mergeToolParameters(userProvidedParams, callParams)
// Collect block outputs for tag resolution
const { blockData, blockNameMapping } = collectBlockData(ctx) const { blockData, blockNameMapping } = collectBlockData(ctx)
const result = await executeTool( const result = await executeTool(
@@ -257,8 +250,6 @@ export class AgentBlockHandler implements BlockHandler {
params: userProvidedParams, params: userProvidedParams,
usageControl: tool.usageControl || 'auto', usageControl: tool.usageControl || 'auto',
executeFunction: async (callParams: Record<string, any>) => { executeFunction: async (callParams: Record<string, any>) => {
logger.info(`Executing MCP tool ${toolName} on server ${serverId}`)
const headers = await buildAuthHeaders() const headers = await buildAuthHeaders()
const execUrl = buildAPIUrl('/api/mcp/tools/execute') const execUrl = buildAPIUrl('/api/mcp/tools/execute')
@@ -565,8 +556,6 @@ export class AgentBlockHandler implements BlockHandler {
responseFormat: any, responseFormat: any,
providerStartTime: number providerStartTime: number
) { ) {
logger.info('Using HTTP provider request (browser environment)')
const url = buildAPIUrl('/api/providers') const url = buildAPIUrl('/api/providers')
const response = await fetch(url.toString(), { const response = await fetch(url.toString(), {
method: 'POST', method: 'POST',
@@ -589,10 +578,8 @@ export class AgentBlockHandler implements BlockHandler {
'HTTP response' 'HTTP response'
) )
// Check if this is a streaming response
const contentType = response.headers.get('Content-Type') const contentType = response.headers.get('Content-Type')
if (contentType?.includes(HTTP.CONTENT_TYPE.EVENT_STREAM)) { if (contentType?.includes(HTTP.CONTENT_TYPE.EVENT_STREAM)) {
logger.info('Received streaming response')
return this.handleStreamingResponse(response, block) return this.handleStreamingResponse(response, block)
} }
@@ -664,15 +651,6 @@ export class AgentBlockHandler implements BlockHandler {
: response && typeof response === 'object' && 'stream' in response : response && typeof response === 'object' && 'stream' in response
? 'streaming-execution' ? 'streaming-execution'
: 'json' : 'json'
logger.info('Provider request completed successfully', {
provider,
model,
workflowId: ctx.workflowId,
blockId: block.id,
executionTime,
responseType,
})
} }
private handleExecutionError( private handleExecutionError(
@@ -745,7 +723,6 @@ export class AgentBlockHandler implements BlockHandler {
block: SerializedBlock block: SerializedBlock
): StreamingExecution { ): StreamingExecution {
const streamingExec = response as StreamingExecution const streamingExec = response as StreamingExecution
logger.info(`Received StreamingExecution for block ${block.id}`)
if (streamingExec.execution.output) { if (streamingExec.execution.output) {
const execution = streamingExec.execution as any const execution = streamingExec.execution as any
@@ -786,16 +763,11 @@ export class AgentBlockHandler implements BlockHandler {
try { try {
const extractedJson = JSON.parse(content.trim()) const extractedJson = JSON.parse(content.trim())
logger.info('Successfully parsed structured response content')
return { return {
...extractedJson, ...extractedJson,
...this.createResponseMetadata(result), ...this.createResponseMetadata(result),
} }
} catch (error) { } catch (error) {
logger.info('JSON parsing failed', {
error: error instanceof Error ? error.message : 'Unknown error',
})
logger.error('LLM did not adhere to structured response format:', { logger.error('LLM did not adhere to structured response format:', {
content: content.substring(0, 200) + (content.length > 200 ? '...' : ''), content: content.substring(0, 200) + (content.length > 200 ? '...' : ''),
responseFormat: responseFormat, responseFormat: responseFormat,

View File

@@ -36,8 +36,7 @@ describe('ApiBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
completedLoops: new Set(), completedLoops: new Set(),

View File

@@ -1,7 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, HTTP } from '@/executor/consts' import { BlockType, HTTP } from '@/executor/consts'
import type { BlockHandler, ExecutionContext } from '@/executor/types' import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { stringifyJSON } from '@/executor/utils/json'
import type { SerializedBlock } from '@/serializer/types' import type { SerializedBlock } from '@/serializer/types'
import { executeTool } from '@/tools' import { executeTool } from '@/tools'
import { getTool } from '@/tools/utils' import { getTool } from '@/tools/utils'
@@ -64,24 +63,13 @@ export class ApiBlockHandler implements BlockHandler {
const trimmedBody = processedInputs.body.trim() const trimmedBody = processedInputs.body.trim()
if (trimmedBody.startsWith('{') || trimmedBody.startsWith('[')) { if (trimmedBody.startsWith('{') || trimmedBody.startsWith('[')) {
processedInputs.body = JSON.parse(trimmedBody) processedInputs.body = JSON.parse(trimmedBody)
logger.info(
'[ApiBlockHandler] Parsed JSON body:',
stringifyJSON(processedInputs.body)
)
}
} catch (e) {
logger.info('[ApiBlockHandler] Failed to parse body as JSON, using as string:', e)
} }
} catch (e) {}
} else if (processedInputs.body === null) { } else if (processedInputs.body === null) {
processedInputs.body = undefined processedInputs.body = undefined
} }
} }
logger.info(
'[ApiBlockHandler] Final processed request body:',
stringifyJSON(processedInputs.body)
)
const result = await executeTool( const result = await executeTool(
block.config.tool, block.config.tool,
{ {

View File

@@ -101,8 +101,7 @@ describe('ConditionBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, // Now set the context's env vars environmentVariables: {}, // Now set the context's env vars
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set([mockSourceBlock.id]), executedBlocks: new Set([mockSourceBlock.id]),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
workflow: mockWorkflow as SerializedWorkflow, workflow: mockWorkflow as SerializedWorkflow,
@@ -333,13 +332,18 @@ describe('ConditionBlockHandler', () => {
it('should handle missing source block output gracefully', async () => { it('should handle missing source block output gracefully', async () => {
const conditions = [{ id: 'cond1', title: 'if', value: 'true' }] const conditions = [{ id: 'cond1', title: 'if', value: 'true' }]
const inputs = { conditions: JSON.stringify(conditions) } const inputs = { conditions: JSON.stringify(conditions) }
mockContext.blockStates.delete(mockSourceBlock.id)
// Create a new context with empty blockStates instead of trying to delete from readonly map
const contextWithoutSource = {
...mockContext,
blockStates: new Map<string, BlockState>(),
}
mockResolver.resolveVariableReferences.mockReturnValue('true') mockResolver.resolveVariableReferences.mockReturnValue('true')
mockResolver.resolveBlockReferences.mockReturnValue('true') mockResolver.resolveBlockReferences.mockReturnValue('true')
mockResolver.resolveEnvVariables.mockReturnValue('true') mockResolver.resolveEnvVariables.mockReturnValue('true')
const result = await handler.execute(mockContext, mockBlock, inputs) const result = await handler.execute(contextWithoutSource, mockBlock, inputs)
expect(result).toHaveProperty('conditionResult', true) expect(result).toHaveProperty('conditionResult', true)
expect(result).toHaveProperty('selectedConditionId', 'cond1') expect(result).toHaveProperty('selectedConditionId', 'cond1')
@@ -393,15 +397,13 @@ describe('ConditionBlockHandler', () => {
) )
}) })
it('should use loop context during evaluation if available', async () => { it('falls back to else path when loop context data is unavailable', async () => {
const conditions = [ const conditions = [
{ id: 'cond1', title: 'if', value: 'context.item === "apple"' }, { id: 'cond1', title: 'if', value: 'context.item === "apple"' },
{ id: 'else1', title: 'else', value: '' }, { id: 'else1', title: 'else', value: '' },
] ]
const inputs = { conditions: JSON.stringify(conditions) } const inputs = { conditions: JSON.stringify(conditions) }
mockContext.loopItems.set(mockBlock.id, { item: 'apple' })
// Mock the full resolution pipeline // Mock the full resolution pipeline
mockResolver.resolveVariableReferences.mockReturnValue('context.item === "apple"') mockResolver.resolveVariableReferences.mockReturnValue('context.item === "apple"')
mockResolver.resolveBlockReferences.mockReturnValue('context.item === "apple"') mockResolver.resolveBlockReferences.mockReturnValue('context.item === "apple"')
@@ -409,7 +411,7 @@ describe('ConditionBlockHandler', () => {
const result = await handler.execute(mockContext, mockBlock, inputs) const result = await handler.execute(mockContext, mockBlock, inputs)
expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('cond1') expect(mockContext.decisions.condition.get(mockBlock.id)).toBe('else1')
expect((result as any).selectedConditionId).toBe('cond1') expect((result as any).selectedConditionId).toBe('else1')
}) })
}) })

View File

@@ -17,9 +17,7 @@ export async function evaluateConditionExpression(
resolver: any, resolver: any,
providedEvalContext?: Record<string, any> providedEvalContext?: Record<string, any>
): Promise<boolean> { ): Promise<boolean> {
const evalContext = providedEvalContext || { const evalContext = providedEvalContext || {}
...(ctx.loopItems.get(block.id) || {}),
}
let resolvedConditionValue = conditionExpression let resolvedConditionValue = conditionExpression
try { try {
@@ -27,9 +25,6 @@ export async function evaluateConditionExpression(
const resolvedVars = resolver.resolveVariableReferences(conditionExpression, block) const resolvedVars = resolver.resolveVariableReferences(conditionExpression, block)
const resolvedRefs = resolver.resolveBlockReferences(resolvedVars, ctx, block) const resolvedRefs = resolver.resolveBlockReferences(resolvedVars, ctx, block)
resolvedConditionValue = resolver.resolveEnvVariables(resolvedRefs) resolvedConditionValue = resolver.resolveEnvVariables(resolvedRefs)
logger.info(
`Resolved condition: from "${conditionExpression}" to "${resolvedConditionValue}"`
)
} }
} catch (resolveError: any) { } catch (resolveError: any) {
logger.error(`Failed to resolve references in condition: ${resolveError.message}`, { logger.error(`Failed to resolve references in condition: ${resolveError.message}`, {
@@ -40,12 +35,10 @@ export async function evaluateConditionExpression(
} }
try { try {
logger.info(`Evaluating resolved condition: "${resolvedConditionValue}"`, { evalContext })
const conditionMet = new Function( const conditionMet = new Function(
'context', 'context',
`with(context) { return ${resolvedConditionValue} }` `with(context) { return ${resolvedConditionValue} }`
)(evalContext) )(evalContext)
logger.info(`Condition evaluated to: ${conditionMet}`)
return Boolean(conditionMet) return Boolean(conditionMet)
} catch (evalError: any) { } catch (evalError: any) {
logger.error(`Failed to evaluate condition: ${evalError.message}`, { logger.error(`Failed to evaluate condition: ${evalError.message}`, {
@@ -78,10 +71,6 @@ export class ConditionBlockHandler implements BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any> inputs: Record<string, any>
): Promise<BlockOutput> { ): Promise<BlockOutput> {
logger.info(`Executing condition block: ${block.id}`, {
rawConditionsInput: inputs.conditions,
})
const conditions = this.parseConditions(inputs.conditions) const conditions = this.parseConditions(inputs.conditions)
const sourceBlockId = ctx.workflow?.connections.find((conn) => conn.target === block.id)?.source const sourceBlockId = ctx.workflow?.connections.find((conn) => conn.target === block.id)?.source
@@ -103,10 +92,6 @@ export class ConditionBlockHandler implements BlockHandler {
throw new Error(`Target block ${selectedConnection?.target} not found`) throw new Error(`Target block ${selectedConnection?.target} not found`)
} }
logger.info(
`Condition block ${block.id} selected path: ${selectedCondition.title} (${selectedCondition.id}) -> ${targetBlock.metadata?.name || targetBlock.id}`
)
const decisionKey = ctx.currentVirtualBlockId || block.id const decisionKey = ctx.currentVirtualBlockId || block.id
ctx.decisions.condition.set(decisionKey, selectedCondition.id) ctx.decisions.condition.set(decisionKey, selectedCondition.id)
@@ -126,7 +111,6 @@ export class ConditionBlockHandler implements BlockHandler {
private parseConditions(input: any): Array<{ id: string; title: string; value: string }> { private parseConditions(input: any): Array<{ id: string; title: string; value: string }> {
try { try {
const conditions = Array.isArray(input) ? input : JSON.parse(input || '[]') const conditions = Array.isArray(input) ? input : JSON.parse(input || '[]')
logger.info('Parsed conditions:', conditions)
return conditions return conditions
} catch (error: any) { } catch (error: any) {
logger.error('Failed to parse conditions:', { input, error }) logger.error('Failed to parse conditions:', { input, error })
@@ -139,9 +123,7 @@ export class ConditionBlockHandler implements BlockHandler {
blockId: string, blockId: string,
sourceBlockId?: string sourceBlockId?: string
): Record<string, any> { ): Record<string, any> {
let evalContext: Record<string, any> = { let evalContext: Record<string, any> = {}
...(ctx.loopItems.get(blockId) || {}),
}
if (sourceBlockId) { if (sourceBlockId) {
const sourceOutput = ctx.blockStates.get(sourceBlockId)?.output const sourceOutput = ctx.blockStates.get(sourceBlockId)?.output
@@ -153,7 +135,6 @@ export class ConditionBlockHandler implements BlockHandler {
} }
} }
logger.info('Base eval context:', evalContext)
return evalContext return evalContext
} }
@@ -185,7 +166,6 @@ export class ConditionBlockHandler implements BlockHandler {
this.resolver, this.resolver,
evalContext evalContext
) )
logger.info(`Condition "${condition.title}" (${condition.id}) met: ${conditionMet}`)
const connection = this.findConnectionForCondition(outgoingConnections, condition.id) const connection = this.findConnectionForCondition(outgoingConnections, condition.id)

View File

@@ -40,8 +40,7 @@ describe('EvaluatorBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
completedLoops: new Set(), completedLoops: new Set(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),

View File

@@ -30,26 +30,28 @@ export class EvaluatorBlockHandler implements BlockHandler {
const processedContent = this.processContent(inputs.content) const processedContent = this.processContent(inputs.content)
// Parse system prompt object with robust error handling
let systemPromptObj: { systemPrompt: string; responseFormat: any } = { let systemPromptObj: { systemPrompt: string; responseFormat: any } = {
systemPrompt: '', systemPrompt: '',
responseFormat: null, responseFormat: null,
} }
logger.info('Inputs for evaluator:', inputs) logger.info('Inputs for evaluator:', inputs)
const metrics = Array.isArray(inputs.metrics) ? inputs.metrics : [] let metrics: any[]
if (Array.isArray(inputs.metrics)) {
metrics = inputs.metrics
} else {
metrics = []
}
logger.info('Metrics for evaluator:', metrics) logger.info('Metrics for evaluator:', metrics)
const metricDescriptions = metrics const metricDescriptions = metrics
.filter((m: any) => m?.name && m.range) // Filter out invalid/incomplete metrics .filter((m: any) => m?.name && m.range)
.map((m: any) => `"${m.name}" (${m.range.min}-${m.range.max}): ${m.description || ''}`) .map((m: any) => `"${m.name}" (${m.range.min}-${m.range.max}): ${m.description || ''}`)
.join('\n') .join('\n')
// Create a response format structure
const responseProperties: Record<string, any> = {} const responseProperties: Record<string, any> = {}
metrics.forEach((m: any) => { metrics.forEach((m: any) => {
// Ensure metric and name are valid before using them
if (m?.name) { if (m?.name) {
responseProperties[m.name.toLowerCase()] = { type: 'number' } // Use lowercase for consistency responseProperties[m.name.toLowerCase()] = { type: 'number' }
} else { } else {
logger.warn('Skipping invalid metric entry during response format generation:', m) logger.warn('Skipping invalid metric entry during response format generation:', m)
} }
@@ -77,7 +79,6 @@ export class EvaluatorBlockHandler implements BlockHandler {
}, },
} }
// Ensure we have a system prompt
if (!systemPromptObj.systemPrompt) { if (!systemPromptObj.systemPrompt) {
systemPromptObj.systemPrompt = systemPromptObj.systemPrompt =
'Evaluate the content and provide scores for each metric as JSON.' 'Evaluate the content and provide scores for each metric as JSON.'
@@ -155,7 +156,10 @@ export class EvaluatorBlockHandler implements BlockHandler {
if (typeof content === 'string') { if (typeof content === 'string') {
if (isJSONString(content)) { if (isJSONString(content)) {
const parsed = parseJSON(content, null) const parsed = parseJSON(content, null)
return parsed ? stringifyJSON(parsed) : content if (parsed) {
return stringifyJSON(parsed)
}
return content
} }
return content return content
} }
@@ -196,7 +200,12 @@ export class EvaluatorBlockHandler implements BlockHandler {
metrics: any metrics: any
): Record<string, number> { ): Record<string, number> {
const metricScores: Record<string, number> = {} const metricScores: Record<string, number> = {}
const validMetrics = Array.isArray(metrics) ? metrics : [] let validMetrics: any[]
if (Array.isArray(metrics)) {
validMetrics = metrics
} else {
validMetrics = []
}
if (Object.keys(parsedContent).length === 0) { if (Object.keys(parsedContent).length === 0) {
validMetrics.forEach((metric: any) => { validMetrics.forEach((metric: any) => {

View File

@@ -46,8 +46,7 @@ describe('FunctionBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
completedLoops: new Set(), completedLoops: new Set(),

View File

@@ -38,8 +38,7 @@ describe('GenericBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
completedLoops: new Set(), completedLoops: new Set(),

View File

@@ -17,8 +17,6 @@ export class GenericBlockHandler implements BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any> inputs: Record<string, any>
): Promise<any> { ): Promise<any> {
logger.info(`Executing block: ${block.id} (Type: ${block.metadata?.id})`)
const isMcpTool = block.config.tool?.startsWith('mcp-') const isMcpTool = block.config.tool?.startsWith('mcp-')
let tool = null let tool = null
@@ -38,10 +36,6 @@ export class GenericBlockHandler implements BlockHandler {
try { try {
const transformedParams = blockConfig.tools.config.params(inputs) const transformedParams = blockConfig.tools.config.params(inputs)
finalInputs = { ...inputs, ...transformedParams } finalInputs = { ...inputs, ...transformedParams }
logger.info(`Applied parameter transformation for block type: ${blockType}`, {
original: inputs,
transformed: transformedParams,
})
} catch (error) { } catch (error) {
logger.warn(`Failed to apply parameter transformation for block type ${blockType}:`, { logger.warn(`Failed to apply parameter transformation for block type ${blockType}:`, {
error: error instanceof Error ? error.message : String(error), error: error instanceof Error ? error.message : String(error),
@@ -50,14 +44,6 @@ export class GenericBlockHandler implements BlockHandler {
} }
} }
logger.info(`[GenericBlockHandler] Calling executeTool for ${block.config.tool}`, {
blockId: block.id,
blockName: block.metadata?.name,
originalInputs: inputs,
finalInputs: finalInputs,
tool: block.config.tool,
})
try { try {
const result = await executeTool( const result = await executeTool(
block.config.tool, block.config.tool,

View File

@@ -4,6 +4,7 @@ import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-h
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler' import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler' import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler' import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
import { PauseResumeBlockHandler } from '@/executor/handlers/pause-resume/pause-resume-handler'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler' import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler' import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler' import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
@@ -19,6 +20,7 @@ export {
FunctionBlockHandler, FunctionBlockHandler,
GenericBlockHandler, GenericBlockHandler,
ResponseBlockHandler, ResponseBlockHandler,
PauseResumeBlockHandler,
RouterBlockHandler, RouterBlockHandler,
TriggerBlockHandler, TriggerBlockHandler,
VariablesBlockHandler, VariablesBlockHandler,

View File

@@ -0,0 +1,668 @@
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import type { BlockOutput } from '@/blocks/types'
import {
BlockType,
buildResumeApiUrl,
buildResumeUiUrl,
type FieldType,
HTTP,
PAUSE_RESUME,
} from '@/executor/consts'
import {
generatePauseContextId,
mapNodeMetadataToPauseScopes,
} from '@/executor/pause-resume/utils.ts'
import type { BlockHandler, ExecutionContext, PauseMetadata } from '@/executor/types'
import { collectBlockData } from '@/executor/utils/block-data'
import type { SerializedBlock } from '@/serializer/types'
import { normalizeBlockName } from '@/stores/workflows/utils'
import { executeTool } from '@/tools'
const logger = createLogger('PauseResumeBlockHandler')

/**
 * One property row from the structured ("builder") response editor.
 * `value` may be a literal, a `<variable.reference>` string, or — for
 * `object`/`array` types — a nested array of JSONProperty rows.
 */
interface JSONProperty {
  id: string
  name: string
  type: FieldType
  value: any
  // UI-only flag; ignored by execution.
  collapsed?: boolean
}

/**
 * Flattened description of one field of the builder response shape,
 * with nested object fields expressed as a dotted `name` path.
 */
interface ResponseStructureEntry {
  name: string
  type: string
  value: any
}

/**
 * A sanitized human-input field definition (trimmed strings, defaulted
 * id/label/type) produced by normalizeInputFormat().
 */
interface NormalizedInputField {
  id: string
  name: string
  label: string
  type: string
  description?: string
  placeholder?: string
  value?: any
  required?: boolean
  options?: any[]
}

/** Outcome summary for one notification tool invoked at pause time. */
interface NotificationToolResult {
  toolId: string
  title?: string
  operation?: string
  success: boolean
  // Wall-clock duration of the tool call; absent when toolId was missing
  // or the call threw before timing could complete.
  durationMs?: number
}
/**
 * Handler for the human-in-the-loop "approval" block.
 *
 * On execution it does not wait itself; instead it assembles a
 * `_pauseMetadata` payload (context id, response snapshot, parallel/loop
 * scopes, resume links) that the surrounding executor uses to pause the
 * run, and optionally fires notification tools (e.g. email/Slack) carrying
 * the resume URLs.
 */
export class PauseResumeBlockHandler implements BlockHandler {
  /** Claims only blocks whose metadata id is the approval block type. */
  canHandle(block: SerializedBlock): boolean {
    return block.metadata?.id === BlockType.APPROVAL
  }

  /**
   * Default entry point: executes with the block's own id as the node id
   * (no loop/parallel branch metadata).
   */
  async execute(
    ctx: ExecutionContext,
    block: SerializedBlock,
    inputs: Record<string, any>
  ): Promise<BlockOutput> {
    return this.executeWithNode(ctx, block, inputs, {
      nodeId: block.id,
    })
  }

  /**
   * Core execution path, parameterized by node metadata so the same block
   * can pause independently per loop iteration / parallel branch.
   *
   * @param ctx - Execution context (ids, block states, metadata).
   * @param block - The serialized approval block.
   * @param inputs - Resolved sub-block inputs (operation, inputFormat,
   *   builderData, data, status, headers, notification, ...).
   * @param nodeMetadata - Identity of the virtual node: base nodeId plus
   *   optional loop/parallel scope information.
   * @returns Block output containing the response payload and
   *   `_pauseMetadata`; on internal failure, a 500-style response object.
   */
  async executeWithNode(
    ctx: ExecutionContext,
    block: SerializedBlock,
    inputs: Record<string, any>,
    nodeMetadata: {
      nodeId: string
      loopId?: string
      parallelId?: string
      branchIndex?: number
      branchTotal?: number
    }
  ): Promise<BlockOutput> {
    try {
      // Default to the human-approval operation when none is supplied.
      const operation = inputs.operation ?? PAUSE_RESUME.OPERATION.HUMAN
      const { parallelScope, loopScope } = mapNodeMetadataToPauseScopes(ctx, nodeMetadata)
      // Context id is derived from block id + node metadata + loop scope so
      // each iteration/branch pause is individually addressable on resume.
      const contextId = generatePauseContextId(block.id, nodeMetadata, loopScope)
      const timestamp = new Date().toISOString()
      const executionId = ctx.executionId ?? ctx.metadata?.executionId
      const workflowId = ctx.workflowId

      // Type query on the `pauseMetadata` const declared below — i.e. the
      // resumeLinks shape of PauseMetadata.
      let resumeLinks: typeof pauseMetadata.resumeLinks | undefined
      if (executionId && workflowId) {
        try {
          const baseUrl = getBaseUrl()
          resumeLinks = {
            apiUrl: buildResumeApiUrl(baseUrl, workflowId, executionId, contextId),
            uiUrl: buildResumeUiUrl(baseUrl, workflowId, executionId),
            contextId,
            executionId,
            workflowId,
          }
        } catch (error) {
          // Fall back to relative URLs if the base URL cannot be resolved.
          logger.warn('Failed to get base URL, using relative paths', { error })
          resumeLinks = {
            apiUrl: buildResumeApiUrl(undefined, workflowId, executionId, contextId),
            uiUrl: buildResumeUiUrl(undefined, workflowId, executionId),
            contextId,
            executionId,
            workflowId,
          }
        }
      }

      const normalizedInputFormat = this.normalizeInputFormat(inputs.inputFormat)
      const responseStructure = this.normalizeResponseStructure(inputs.builderData)

      let responseData: any
      let statusCode: number
      let responseHeaders: Record<string, string>
      if (operation === PAUSE_RESUME.OPERATION.API) {
        // API operation: echo the user-configured response payload, with the
        // operation and a responseStructure merged in when the payload is a
        // plain object (a payload-supplied responseStructure array wins).
        const parsed = this.parseResponseData(inputs)
        if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) {
          responseData = {
            ...parsed,
            operation,
            responseStructure:
              parsed.responseStructure && Array.isArray(parsed.responseStructure)
                ? parsed.responseStructure
                : responseStructure,
          }
        } else {
          responseData = parsed
        }
        statusCode = this.parseStatus(inputs.status)
        responseHeaders = this.parseHeaders(inputs.headers)
      } else {
        // Human operation: response describes the pending form (input fields
        // plus expected response structure); `submission` is filled on resume.
        responseData = {
          operation,
          responseStructure,
          inputFormat: normalizedInputFormat,
          submission: null,
        }
        statusCode = HTTP.STATUS.OK
        responseHeaders = { 'Content-Type': HTTP.CONTENT_TYPE.JSON }
      }

      // Fire configured notification tools (human operation only) so the
      // approver receives the resume links.
      let notificationResults: NotificationToolResult[] | undefined
      if (
        operation === PAUSE_RESUME.OPERATION.HUMAN &&
        inputs.notification &&
        Array.isArray(inputs.notification)
      ) {
        notificationResults = await this.executeNotificationTools(ctx, block, inputs.notification, {
          resumeLinks,
          executionId,
          workflowId,
          inputFormat: normalizedInputFormat,
          responseStructure,
          operation,
        })
      }

      // Attach resume links under `_resume` when the payload is a plain object.
      const responseDataWithResume =
        resumeLinks &&
        responseData &&
        typeof responseData === 'object' &&
        !Array.isArray(responseData)
          ? { ...responseData, _resume: resumeLinks }
          : responseData

      // Metadata the executor persists in order to pause and later resume
      // this node; blockId is the (possibly virtual) node id.
      const pauseMetadata: PauseMetadata = {
        contextId,
        blockId: nodeMetadata.nodeId,
        response: {
          data: responseDataWithResume,
          status: statusCode,
          headers: responseHeaders,
        },
        timestamp,
        parallelScope,
        loopScope,
        resumeLinks,
      }

      const responseOutput: Record<string, any> = {
        data: responseDataWithResume,
        status: statusCode,
        headers: responseHeaders,
        operation,
      }
      if (operation === PAUSE_RESUME.OPERATION.HUMAN) {
        responseOutput.responseStructure = responseStructure
        responseOutput.inputFormat = normalizedInputFormat
        responseOutput.submission = null
      }
      if (resumeLinks) {
        responseOutput.resume = resumeLinks
      }

      // Surface each human-input field as a top-level output key (null until
      // a value is provided) so downstream blocks can reference them by name.
      const structuredFields: Record<string, any> = {}
      if (operation === PAUSE_RESUME.OPERATION.HUMAN) {
        for (const field of normalizedInputFormat) {
          if (field.name) {
            structuredFields[field.name] = field.value !== undefined ? field.value : null
          }
        }
      }

      const output: Record<string, any> = {
        ...structuredFields,
        response: responseOutput,
        _pauseMetadata: pauseMetadata,
      }
      if (notificationResults && notificationResults.length > 0) {
        output.notificationResults = notificationResults
      }
      if (resumeLinks) {
        output.uiUrl = resumeLinks.uiUrl
        output.apiUrl = resumeLinks.apiUrl
      }
      return output
    } catch (error: any) {
      // Never throw out of the handler: report failure as a 500-style response.
      logger.error('Pause resume block execution failed:', error)
      return {
        response: {
          data: {
            error: 'Pause resume block execution failed',
            message: error.message || 'Unknown error',
          },
          status: HTTP.STATUS.SERVER_ERROR,
          headers: { 'Content-Type': HTTP.CONTENT_TYPE.JSON },
        },
      }
    }
  }

  /**
   * Resolves the configured response payload for the API operation.
   * `json` mode parses `inputs.data` (falling back to the raw string on
   * parse failure); `structured` mode converts builderData rows to JSON and
   * deep-parses any stringified objects; otherwise falls back to
   * `inputs.data || {}`.
   */
  private parseResponseData(inputs: Record<string, any>): any {
    const dataMode = inputs.dataMode || 'structured'
    if (dataMode === 'json' && inputs.data) {
      if (typeof inputs.data === 'string') {
        try {
          return JSON.parse(inputs.data)
        } catch (error) {
          logger.warn('Failed to parse JSON data, returning as string:', error)
          return inputs.data
        }
      } else if (typeof inputs.data === 'object' && inputs.data !== null) {
        return inputs.data
      }
      return inputs.data
    }
    if (dataMode === 'structured' && inputs.builderData) {
      const convertedData = this.convertBuilderDataToJson(inputs.builderData)
      return this.parseObjectStrings(convertedData)
    }
    return inputs.data || {}
  }

  /**
   * Flattens builder rows into ResponseStructureEntry records, recursing
   * into object-typed rows and joining names with '.' (e.g. "user.email").
   * Rows with empty names are skipped; an object row whose children all
   * have empty names is emitted as a single entry instead.
   */
  private normalizeResponseStructure(
    builderData?: JSONProperty[],
    prefix = ''
  ): ResponseStructureEntry[] {
    if (!Array.isArray(builderData)) {
      return []
    }
    const entries: ResponseStructureEntry[] = []
    for (const prop of builderData) {
      const fieldName = typeof prop.name === 'string' ? prop.name.trim() : ''
      if (!fieldName) continue
      const path = prefix ? `${prefix}.${fieldName}` : fieldName
      if (prop.type === 'object' && Array.isArray(prop.value)) {
        const nested = this.normalizeResponseStructure(prop.value, path)
        if (nested.length > 0) {
          entries.push(...nested)
          continue
        }
      }
      const value = this.convertPropertyValue(prop)
      entries.push({
        name: path,
        type: prop.type,
        value,
      })
    }
    return entries
  }

  /**
   * Sanitizes the raw input-format configuration: drops entries without a
   * name, trims strings, and defaults id (`field_<index>`), label (the
   * name) and type ('string'). Non-array input yields [].
   */
  private normalizeInputFormat(inputFormat: any): NormalizedInputField[] {
    if (!Array.isArray(inputFormat)) {
      return []
    }
    return inputFormat
      .map((field: any, index: number) => {
        const name = typeof field?.name === 'string' ? field.name.trim() : ''
        if (!name) return null
        const id =
          typeof field?.id === 'string' && field.id.length > 0 ? field.id : `field_${index}`
        const label =
          typeof field?.label === 'string' && field.label.trim().length > 0
            ? field.label.trim()
            : name
        const type =
          typeof field?.type === 'string' && field.type.trim().length > 0 ? field.type : 'string'
        const description =
          typeof field?.description === 'string' && field.description.trim().length > 0
            ? field.description.trim()
            : undefined
        const placeholder =
          typeof field?.placeholder === 'string' && field.placeholder.trim().length > 0
            ? field.placeholder.trim()
            : undefined
        const required = field?.required === true
        const options = Array.isArray(field?.options) ? field.options : undefined
        return {
          id,
          name,
          label,
          type,
          description,
          placeholder,
          value: field?.value,
          required,
          options,
        } as NormalizedInputField
      })
      .filter((field): field is NormalizedInputField => field !== null)
  }

  /**
   * Converts builder rows into a plain JSON object keyed by row name,
   * coercing each value via convertPropertyValue(). Unnamed rows are
   * skipped; non-array input yields {}.
   */
  private convertBuilderDataToJson(builderData: JSONProperty[]): any {
    if (!Array.isArray(builderData)) {
      return {}
    }
    const result: any = {}
    for (const prop of builderData) {
      if (!prop.name || !prop.name.trim()) {
        continue
      }
      const value = this.convertPropertyValue(prop)
      result[prop.name] = value
    }
    return result
  }

  /**
   * UI helper (Builder -> Editor mode): renders builder rows as a pretty
   * JSON string with `<variable.reference>` values unquoted so they stay
   * editable as references. Empty input yields an empty-object skeleton.
   */
  static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
    if (!Array.isArray(builderData) || builderData.length === 0) {
      return '{\n  \n}'
    }
    const result: any = {}
    for (const prop of builderData) {
      if (!prop.name || !prop.name.trim()) {
        continue
      }
      // Keep raw values (no coercion) for display purposes.
      result[prop.name] = prop.value
    }
    let jsonString = JSON.stringify(result, null, 2)
    // Strip the quotes JSON.stringify added around <variable.reference> tokens.
    jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
    return jsonString
  }

  /** Dispatches value coercion by the row's declared type. */
  private convertPropertyValue(prop: JSONProperty): any {
    switch (prop.type) {
      case 'object':
        return this.convertObjectValue(prop.value)
      case 'array':
        return this.convertArrayValue(prop.value)
      case 'number':
        return this.convertNumberValue(prop.value)
      case 'boolean':
        return this.convertBooleanValue(prop.value)
      case 'files':
        // File values pass through untouched.
        return prop.value
      default:
        return prop.value
    }
  }

  /**
   * Object-typed value: nested builder rows recurse; JSON strings are
   * parsed (unless they are variable references); anything else passes
   * through.
   */
  private convertObjectValue(value: any): any {
    if (Array.isArray(value)) {
      return this.convertBuilderDataToJson(value)
    }
    if (typeof value === 'string' && !this.isVariableReference(value)) {
      return this.tryParseJson(value, value)
    }
    return value
  }

  /**
   * Array-typed value: element arrays are converted item-by-item; JSON
   * strings are parsed only if they parse to an array; anything else
   * passes through.
   */
  private convertArrayValue(value: any): any {
    if (Array.isArray(value)) {
      return value.map((item: any) => this.convertArrayItem(item))
    }
    if (typeof value === 'string' && !this.isVariableReference(value)) {
      const parsed = this.tryParseJson(value, value)
      return Array.isArray(parsed) ? parsed : value
    }
    return value
  }

  /**
   * Converts one array element: typed wrapper objects ({ type, value })
   * are unwrapped (recursing one level for object/array types); plain
   * values pass through.
   */
  private convertArrayItem(item: any): any {
    if (typeof item !== 'object' || !item.type) {
      return item
    }
    if (item.type === 'object' && Array.isArray(item.value)) {
      return this.convertBuilderDataToJson(item.value)
    }
    if (item.type === 'array' && Array.isArray(item.value)) {
      return item.value.map((subItem: any) =>
        typeof subItem === 'object' && subItem.type ? subItem.value : subItem
      )
    }
    return item.value
  }

  /**
   * Number-typed value: variable references and non-numeric strings pass
   * through unchanged; otherwise coerced with Number().
   */
  private convertNumberValue(value: any): any {
    if (this.isVariableReference(value)) {
      return value
    }
    const numValue = Number(value)
    return Number.isNaN(numValue) ? value : numValue
  }

  /**
   * Boolean-typed value: variable references pass through; otherwise true
   * only for boolean true or the string 'true'.
   */
  private convertBooleanValue(value: any): any {
    if (this.isVariableReference(value)) {
      return value
    }
    return value === 'true' || value === true
  }

  /** JSON.parse that returns `fallback` instead of throwing. */
  private tryParseJson(jsonString: string, fallback: any): any {
    try {
      return JSON.parse(jsonString)
    } catch {
      return fallback
    }
  }

  /** True for strings that look like `<variable.reference>` tokens. */
  private isVariableReference(value: any): boolean {
    return typeof value === 'string' && value.trim().startsWith('<') && value.trim().includes('>')
  }

  /**
   * Recursively parses stringified objects/arrays anywhere in `data`:
   * strings that JSON-parse to objects are replaced (and recursed into);
   * unparsable strings and primitives are left as-is.
   */
  private parseObjectStrings(data: any): any {
    if (typeof data === 'string') {
      try {
        const parsed = JSON.parse(data)
        if (typeof parsed === 'object' && parsed !== null) {
          return this.parseObjectStrings(parsed)
        }
        return parsed
      } catch {
        return data
      }
    } else if (Array.isArray(data)) {
      return data.map((item) => this.parseObjectStrings(item))
    } else if (typeof data === 'object' && data !== null) {
      const result: any = {}
      for (const [key, value] of Object.entries(data)) {
        result[key] = this.parseObjectStrings(value)
      }
      return result
    }
    return data
  }

  /**
   * Parses the configured status string; anything missing, non-numeric, or
   * outside the valid HTTP range 100-599 falls back to 200 OK.
   */
  private parseStatus(status?: string): number {
    if (!status) return HTTP.STATUS.OK
    const parsed = Number(status)
    if (Number.isNaN(parsed) || parsed < 100 || parsed > 599) {
      return HTTP.STATUS.OK
    }
    return parsed
  }

  /**
   * Converts the header table rows into a header map, always including a
   * JSON Content-Type default (overridable by a row with the same key).
   * NOTE(review): handles a missing table even though the parameter type
   * is non-optional — callers may pass undefined.
   */
  private parseHeaders(
    headers: {
      id: string
      cells: { Key: string; Value: string }
    }[]
  ): Record<string, string> {
    const defaultHeaders = { 'Content-Type': HTTP.CONTENT_TYPE.JSON }
    if (!headers) return defaultHeaders
    const headerObj = headers.reduce((acc: Record<string, string>, header) => {
      if (header?.cells?.Key && header?.cells?.Value) {
        acc[header.cells.Key] = header.cells.Value
      }
      return acc
    }, {})
    return { ...defaultHeaders, ...headerObj }
  }

  /**
   * Runs all configured notification tools in parallel, giving each one
   * the pause context (resume URLs, input format, response structure) plus
   * a snapshot of prior block outputs augmented with this pause block's
   * own fields, so notification templates can reference them by name.
   * Failures are reported per-tool in the result list, never thrown.
   */
  private async executeNotificationTools(
    ctx: ExecutionContext,
    block: SerializedBlock,
    tools: any[],
    context: {
      resumeLinks?: {
        apiUrl: string
        uiUrl: string
        contextId: string
        executionId: string
        workflowId: string
      }
      executionId?: string
      workflowId?: string
      inputFormat?: NormalizedInputField[]
      responseStructure?: ResponseStructureEntry[]
      operation?: string
    }
  ): Promise<NotificationToolResult[]> {
    if (!tools || tools.length === 0) {
      return []
    }
    // Copy the collected block data so the pause block's own output can be
    // injected without mutating executor state.
    const { blockData: collectedBlockData, blockNameMapping: collectedBlockNameMapping } =
      collectBlockData(ctx)
    const blockDataWithPause: Record<string, any> = { ...collectedBlockData }
    const blockNameMappingWithPause: Record<string, string> = { ...collectedBlockNameMapping }
    const pauseBlockId = block.id
    const pauseBlockName = block.metadata?.name
    const pauseOutput: Record<string, any> = {
      ...(blockDataWithPause[pauseBlockId] || {}),
    }
    if (context.resumeLinks) {
      if (context.resumeLinks.uiUrl) {
        pauseOutput.uiUrl = context.resumeLinks.uiUrl
      }
      if (context.resumeLinks.apiUrl) {
        pauseOutput.apiUrl = context.resumeLinks.apiUrl
      }
    }
    // Expose each human-input field on the pause block's output (null when
    // no value yet), without clobbering existing keys.
    if (Array.isArray(context.inputFormat)) {
      for (const field of context.inputFormat) {
        if (field?.name) {
          const fieldName = field.name.trim()
          if (fieldName.length > 0 && !(fieldName in pauseOutput)) {
            pauseOutput[fieldName] = field.value !== undefined ? field.value : null
          }
        }
      }
    }
    blockDataWithPause[pauseBlockId] = pauseOutput
    // Register both the raw and normalized block names so either spelling
    // resolves in notification templates.
    if (pauseBlockName) {
      blockNameMappingWithPause[pauseBlockName] = pauseBlockId
      blockNameMappingWithPause[normalizeBlockName(pauseBlockName)] = pauseBlockId
    }
    const notificationPromises = tools.map<Promise<NotificationToolResult>>(async (toolConfig) => {
      const startTime = Date.now()
      try {
        const toolId = toolConfig.toolId
        if (!toolId) {
          logger.warn('Notification tool missing toolId', { toolConfig })
          return {
            toolId: 'unknown',
            title: toolConfig.title,
            operation: toolConfig.operation,
            success: false,
          }
        }
        const toolParams = {
          ...toolConfig.params,
          // Pause-specific context consumed by notification tools.
          _pauseContext: {
            resumeApiUrl: context.resumeLinks?.apiUrl,
            resumeUiUrl: context.resumeLinks?.uiUrl,
            executionId: context.executionId,
            workflowId: context.workflowId,
            contextId: context.resumeLinks?.contextId,
            inputFormat: context.inputFormat,
            responseStructure: context.responseStructure,
            operation: context.operation,
          },
          _context: {
            workflowId: ctx.workflowId,
            workspaceId: ctx.workspaceId,
          },
          blockData: blockDataWithPause,
          blockNameMapping: blockNameMappingWithPause,
        }
        const result = await executeTool(toolId, toolParams, false, false, ctx)
        const durationMs = Date.now() - startTime
        if (!result.success) {
          logger.warn('Notification tool execution failed', {
            toolId,
            error: result.error,
          })
          return {
            toolId,
            title: toolConfig.title,
            operation: toolConfig.operation,
            success: false,
            durationMs,
          }
        }
        return {
          toolId,
          title: toolConfig.title,
          operation: toolConfig.operation,
          success: true,
          durationMs,
        }
      } catch (error) {
        logger.error('Error executing notification tool', { error, toolConfig })
        return {
          toolId: toolConfig.toolId || 'unknown',
          title: toolConfig.title,
          operation: toolConfig.operation,
          success: false,
        }
      }
    })
    // All tools run concurrently; each promise resolves (never rejects)
    // with its own result record.
    return Promise.all(notificationPromises)
  }
}

View File

@@ -5,19 +5,20 @@
* Creates handlers for real user blocks (not infrastructure like sentinels). * Creates handlers for real user blocks (not infrastructure like sentinels).
*/ */
import { AgentBlockHandler } from '@/executor/handlers/agent/agent-handler'
import { ApiBlockHandler } from '@/executor/handlers/api/api-handler'
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
import { PauseResumeBlockHandler } from '@/executor/handlers/pause-resume/pause-resume-handler'
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
import { VariablesBlockHandler } from '@/executor/handlers/variables/variables-handler'
import { WaitBlockHandler } from '@/executor/handlers/wait/wait-handler'
import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler'
import type { BlockHandler } from '@/executor/types' import type { BlockHandler } from '@/executor/types'
import { AgentBlockHandler } from './agent/agent-handler'
import { ApiBlockHandler } from './api/api-handler'
import { ConditionBlockHandler } from './condition/condition-handler'
import { EvaluatorBlockHandler } from './evaluator/evaluator-handler'
import { FunctionBlockHandler } from './function/function-handler'
import { GenericBlockHandler } from './generic/generic-handler'
import { ResponseBlockHandler } from './response/response-handler'
import { RouterBlockHandler } from './router/router-handler'
import { TriggerBlockHandler } from './trigger/trigger-handler'
import { VariablesBlockHandler } from './variables/variables-handler'
import { WaitBlockHandler } from './wait/wait-handler'
import { WorkflowBlockHandler } from './workflow/workflow-handler'
/** /**
* Create all block handlers * Create all block handlers
@@ -27,20 +28,19 @@ import { WorkflowBlockHandler } from './workflow/workflow-handler'
*/ */
export function createBlockHandlers(): BlockHandler[] { export function createBlockHandlers(): BlockHandler[] {
return [ return [
// Core block handlers
new TriggerBlockHandler(), new TriggerBlockHandler(),
new FunctionBlockHandler(), new FunctionBlockHandler(),
new ApiBlockHandler(), new ApiBlockHandler(),
new ConditionBlockHandler(), new ConditionBlockHandler(),
new RouterBlockHandler(), new RouterBlockHandler(),
new ResponseBlockHandler(), new ResponseBlockHandler(),
new PauseResumeBlockHandler(),
new AgentBlockHandler(), new AgentBlockHandler(),
new VariablesBlockHandler(), new VariablesBlockHandler(),
new WorkflowBlockHandler(), new WorkflowBlockHandler(),
new WaitBlockHandler(), new WaitBlockHandler(),
new EvaluatorBlockHandler(), new EvaluatorBlockHandler(),
// Generic handler must be last (fallback)
new GenericBlockHandler(), new GenericBlockHandler(),
] ]
} }

View File

@@ -63,7 +63,6 @@ export class ResponseBlockHandler implements BlockHandler {
const dataMode = inputs.dataMode || 'structured' const dataMode = inputs.dataMode || 'structured'
if (dataMode === 'json' && inputs.data) { if (dataMode === 'json' && inputs.data) {
// Handle JSON mode - data comes from code editor
if (typeof inputs.data === 'string') { if (typeof inputs.data === 'string') {
try { try {
return JSON.parse(inputs.data) return JSON.parse(inputs.data)
@@ -72,19 +71,16 @@ export class ResponseBlockHandler implements BlockHandler {
return inputs.data return inputs.data
} }
} else if (typeof inputs.data === 'object' && inputs.data !== null) { } else if (typeof inputs.data === 'object' && inputs.data !== null) {
// Data is already an object, return as-is
return inputs.data return inputs.data
} }
return inputs.data return inputs.data
} }
if (dataMode === 'structured' && inputs.builderData) { if (dataMode === 'structured' && inputs.builderData) {
// Handle structured mode - convert builderData to JSON
const convertedData = this.convertBuilderDataToJson(inputs.builderData) const convertedData = this.convertBuilderDataToJson(inputs.builderData)
return this.parseObjectStrings(convertedData) return this.parseObjectStrings(convertedData)
} }
// Fallback to inputs.data for backward compatibility
return inputs.data || {} return inputs.data || {}
} }
@@ -107,7 +103,6 @@ export class ResponseBlockHandler implements BlockHandler {
return result return result
} }
// Static method for UI conversion from Builder to Editor mode
static convertBuilderDataToJsonString(builderData: JSONProperty[]): string { static convertBuilderDataToJsonString(builderData: JSONProperty[]): string {
if (!Array.isArray(builderData) || builderData.length === 0) { if (!Array.isArray(builderData) || builderData.length === 0) {
return '{\n \n}' return '{\n \n}'
@@ -120,15 +115,11 @@ export class ResponseBlockHandler implements BlockHandler {
continue continue
} }
// For UI display, keep variable references as-is without processing
result[prop.name] = prop.value result[prop.name] = prop.value
} }
// Convert to JSON string, then replace quoted variable references with unquoted ones
let jsonString = JSON.stringify(result, null, 2) let jsonString = JSON.stringify(result, null, 2)
// Replace quoted variable references with unquoted ones
// Pattern: "<variable.name>" -> <variable.name>
jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1') jsonString = jsonString.replace(/"(<[^>]+>)"/g, '$1')
return jsonString return jsonString
@@ -145,7 +136,6 @@ export class ResponseBlockHandler implements BlockHandler {
case 'boolean': case 'boolean':
return this.convertBooleanValue(prop.value) return this.convertBooleanValue(prop.value)
case 'files': case 'files':
// File values should be passed through as-is (UserFile objects)
return prop.value return prop.value
default: default:
return prop.value return prop.value
@@ -161,7 +151,6 @@ export class ResponseBlockHandler implements BlockHandler {
return this.tryParseJson(value, value) return this.tryParseJson(value, value)
} }
// Keep variable references or other values as-is (they'll be resolved later)
return value return value
} }
@@ -172,10 +161,12 @@ export class ResponseBlockHandler implements BlockHandler {
if (typeof value === 'string' && !this.isVariableReference(value)) { if (typeof value === 'string' && !this.isVariableReference(value)) {
const parsed = this.tryParseJson(value, value) const parsed = this.tryParseJson(value, value)
return Array.isArray(parsed) ? parsed : value if (Array.isArray(parsed)) {
return parsed
}
return value
} }
// Keep variable references or other values as-is
return value return value
} }
@@ -189,9 +180,12 @@ export class ResponseBlockHandler implements BlockHandler {
} }
if (item.type === 'array' && Array.isArray(item.value)) { if (item.type === 'array' && Array.isArray(item.value)) {
return item.value.map((subItem: any) => return item.value.map((subItem: any) => {
typeof subItem === 'object' && subItem.type ? subItem.value : subItem if (typeof subItem === 'object' && subItem.type) {
) return subItem.value
}
return subItem
})
} }
return item.value return item.value
@@ -203,7 +197,10 @@ export class ResponseBlockHandler implements BlockHandler {
} }
const numValue = Number(value) const numValue = Number(value)
return Number.isNaN(numValue) ? value : numValue if (Number.isNaN(numValue)) {
return value
}
return numValue
} }
private convertBooleanValue(value: any): any { private convertBooleanValue(value: any): any {
@@ -228,15 +225,14 @@ export class ResponseBlockHandler implements BlockHandler {
private parseObjectStrings(data: any): any { private parseObjectStrings(data: any): any {
if (typeof data === 'string') { if (typeof data === 'string') {
// Try to parse strings that might be JSON objects
try { try {
const parsed = JSON.parse(data) const parsed = JSON.parse(data)
if (typeof parsed === 'object' && parsed !== null) { if (typeof parsed === 'object' && parsed !== null) {
return this.parseObjectStrings(parsed) // Recursively parse nested objects return this.parseObjectStrings(parsed)
} }
return parsed return parsed
} catch { } catch {
return data // Return as string if not valid JSON return data
} }
} else if (Array.isArray(data)) { } else if (Array.isArray(data)) {
return data.map((item) => this.parseObjectStrings(item)) return data.map((item) => this.parseObjectStrings(item))

View File

@@ -65,8 +65,7 @@ describe('RouterBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
completedLoops: new Set(), completedLoops: new Set(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),

View File

@@ -58,16 +58,13 @@ export class RouterBlockHandler implements BlockHandler {
}) })
if (!response.ok) { if (!response.ok) {
// Try to extract a helpful error message
let errorMessage = `Provider API request failed with status ${response.status}` let errorMessage = `Provider API request failed with status ${response.status}`
try { try {
const errorData = await response.json() const errorData = await response.json()
if (errorData.error) { if (errorData.error) {
errorMessage = errorData.error errorMessage = errorData.error
} }
} catch (_e) { } catch (_e) {}
// If JSON parsing fails, use the original error message
}
throw new Error(errorMessage) throw new Error(errorMessage)
} }
@@ -90,7 +87,6 @@ export class RouterBlockHandler implements BlockHandler {
total: DEFAULTS.TOKENS.TOTAL, total: DEFAULTS.TOKENS.TOTAL,
} }
// Calculate cost based on token usage, similar to how providers do it
const cost = calculateCost( const cost = calculateCost(
result.model, result.model,
tokens.prompt || DEFAULTS.TOKENS.PROMPT, tokens.prompt || DEFAULTS.TOKENS.PROMPT,
@@ -116,7 +112,7 @@ export class RouterBlockHandler implements BlockHandler {
blockType: chosenBlock.type || DEFAULTS.BLOCK_TYPE, blockType: chosenBlock.type || DEFAULTS.BLOCK_TYPE,
blockTitle: chosenBlock.title || DEFAULTS.BLOCK_TITLE, blockTitle: chosenBlock.title || DEFAULTS.BLOCK_TITLE,
}, },
selectedRoute: String(chosenBlock.id), // Used by ExecutionEngine to activate the correct edge selectedRoute: String(chosenBlock.id),
} as BlockOutput } as BlockOutput
} catch (error) { } catch (error) {
logger.error('Router execution failed:', error) logger.error('Router execution failed:', error)

View File

@@ -19,8 +19,7 @@ describe('TriggerBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
completedLoops: new Set(), completedLoops: new Set(),

View File

@@ -43,18 +43,10 @@ export class TriggerBlockHandler implements BlockHandler {
if (starterState?.output && Object.keys(starterState.output).length > 0) { if (starterState?.output && Object.keys(starterState.output).length > 0) {
const starterOutput = starterState.output const starterOutput = starterState.output
// Generic handling for webhook triggers - extract provider-specific data
// Check if this is a webhook execution
if (starterOutput.webhook?.data) { if (starterOutput.webhook?.data) {
const webhookData = starterOutput.webhook?.data || {} const webhookData = starterOutput.webhook?.data || {}
const provider = webhookData.provider const provider = webhookData.provider
logger.debug(`Processing webhook trigger for block ${block.id}`, {
provider,
blockType: block.metadata?.id,
})
if (provider === 'github') { if (provider === 'github') {
const payloadSource = webhookData.payload || {} const payloadSource = webhookData.payload || {}
return { return {
@@ -136,21 +128,14 @@ export class TriggerBlockHandler implements BlockHandler {
return result return result
} }
logger.debug(`Returning starter block output for trigger block ${block.id}`, {
starterOutputKeys: Object.keys(starterOutput),
})
return starterOutput return starterOutput
} }
} }
if (inputs && Object.keys(inputs).length > 0) { if (inputs && Object.keys(inputs).length > 0) {
logger.debug(`Returning trigger inputs for block ${block.id}`, {
inputKeys: Object.keys(inputs),
})
return inputs return inputs
} }
logger.debug(`No inputs provided for trigger block ${block.id}, returning empty object`)
return {} return {}
} }
@@ -165,10 +150,6 @@ export class TriggerBlockHandler implements BlockHandler {
const existingState = ctx.blockStates.get(block.id) const existingState = ctx.blockStates.get(block.id)
if (existingState?.output && Object.keys(existingState.output).length > 0) { if (existingState?.output && Object.keys(existingState.output).length > 0) {
logger.debug('Returning pre-initialized starter block output', {
blockId: block.id,
outputKeys: Object.keys(existingState.output),
})
return existingState.output return existingState.output
} }

View File

@@ -9,11 +9,6 @@ const logger = createLogger('VariablesBlockHandler')
export class VariablesBlockHandler implements BlockHandler { export class VariablesBlockHandler implements BlockHandler {
canHandle(block: SerializedBlock): boolean { canHandle(block: SerializedBlock): boolean {
const canHandle = block.metadata?.id === BlockType.VARIABLES const canHandle = block.metadata?.id === BlockType.VARIABLES
logger.info(`VariablesBlockHandler.canHandle: ${canHandle}`, {
blockId: block.id,
metadataId: block.metadata?.id,
expectedType: BlockType.VARIABLES,
})
return canHandle return canHandle
} }
@@ -22,12 +17,6 @@ export class VariablesBlockHandler implements BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any> inputs: Record<string, any>
): Promise<BlockOutput> { ): Promise<BlockOutput> {
logger.info(`Executing variables block: ${block.id}`, {
blockName: block.metadata?.name,
inputsKeys: Object.keys(inputs),
variablesInput: inputs.variables,
})
try { try {
if (!ctx.workflowVariables) { if (!ctx.workflowVariables) {
ctx.workflowVariables = {} ctx.workflowVariables = {}
@@ -53,16 +42,6 @@ export class VariablesBlockHandler implements BlockHandler {
} }
} }
logger.info('Variables updated', {
updatedVariables: assignments.map((a) => a.variableName),
allVariables: Object.values(ctx.workflowVariables).map((v: any) => v.name),
updatedValues: Object.entries(ctx.workflowVariables).map(([id, v]: [string, any]) => ({
id,
name: v.name,
value: v.value,
})),
})
const output: Record<string, any> = {} const output: Record<string, any> = {}
for (const assignment of assignments) { for (const assignment of assignments) {
output[assignment.variableName] = assignment.value output[assignment.variableName] = assignment.value

View File

@@ -13,29 +13,25 @@ const logger = createLogger('WaitBlockHandler')
const sleep = async (ms: number, checkCancelled?: () => boolean): Promise<boolean> => { const sleep = async (ms: number, checkCancelled?: () => boolean): Promise<boolean> => {
const isClientSide = typeof window !== 'undefined' const isClientSide = typeof window !== 'undefined'
// Server-side: simple sleep without polling
if (!isClientSide) { if (!isClientSide) {
await new Promise((resolve) => setTimeout(resolve, ms)) await new Promise((resolve) => setTimeout(resolve, ms))
return true return true
} }
// Client-side: check for cancellation every 100ms
const chunkMs = 100 const chunkMs = 100
let elapsed = 0 let elapsed = 0
while (elapsed < ms) { while (elapsed < ms) {
// Check if execution was cancelled
if (checkCancelled?.()) { if (checkCancelled?.()) {
return false // Sleep was interrupted return false
} }
// Sleep for a chunk or remaining time, whichever is smaller
const sleepTime = Math.min(chunkMs, ms - elapsed) const sleepTime = Math.min(chunkMs, ms - elapsed)
await new Promise((resolve) => setTimeout(resolve, sleepTime)) await new Promise((resolve) => setTimeout(resolve, sleepTime))
elapsed += sleepTime elapsed += sleepTime
} }
return true // Sleep completed normally return true
} }
/** /**
@@ -51,34 +47,24 @@ export class WaitBlockHandler implements BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any> inputs: Record<string, any>
): Promise<any> { ): Promise<any> {
logger.info(`Executing Wait block: ${block.id}`, { inputs })
// Parse the wait duration
const timeValue = Number.parseInt(inputs.timeValue || '10', 10) const timeValue = Number.parseInt(inputs.timeValue || '10', 10)
const timeUnit = inputs.timeUnit || 'seconds' const timeUnit = inputs.timeUnit || 'seconds'
// Validate time value
if (Number.isNaN(timeValue) || timeValue <= 0) { if (Number.isNaN(timeValue) || timeValue <= 0) {
throw new Error('Wait amount must be a positive number') throw new Error('Wait amount must be a positive number')
} }
// Calculate wait time in milliseconds let waitMs = timeValue * 1000
let waitMs = timeValue * 1000 // Default to seconds
if (timeUnit === 'minutes') { if (timeUnit === 'minutes') {
waitMs = timeValue * 60 * 1000 waitMs = timeValue * 60 * 1000
} }
// Enforce 10-minute maximum (600,000 ms)
const maxWaitMs = 10 * 60 * 1000 const maxWaitMs = 10 * 60 * 1000
if (waitMs > maxWaitMs) { if (waitMs > maxWaitMs) {
const maxDisplay = timeUnit === 'minutes' ? '10 minutes' : '600 seconds' const maxDisplay = timeUnit === 'minutes' ? '10 minutes' : '600 seconds'
throw new Error(`Wait time exceeds maximum of ${maxDisplay}`) throw new Error(`Wait time exceeds maximum of ${maxDisplay}`)
} }
logger.info(`Waiting for ${waitMs}ms (${timeValue} ${timeUnit})`)
// Actually sleep for the specified duration
// The executor updates context.isCancelled when cancel() is called
const checkCancelled = () => { const checkCancelled = () => {
return (ctx as any).isCancelled === true return (ctx as any).isCancelled === true
} }
@@ -86,14 +72,12 @@ export class WaitBlockHandler implements BlockHandler {
const completed = await sleep(waitMs, checkCancelled) const completed = await sleep(waitMs, checkCancelled)
if (!completed) { if (!completed) {
logger.info('Wait was interrupted by cancellation')
return { return {
waitDuration: waitMs, waitDuration: waitMs,
status: 'cancelled', status: 'cancelled',
} }
} }
logger.info('Wait completed successfully')
return { return {
waitDuration: waitMs, waitDuration: waitMs,
status: 'completed', status: 'completed',

View File

@@ -44,8 +44,7 @@ describe('WorkflowBlockHandler', () => {
metadata: { duration: 0 }, metadata: { duration: 0 },
environmentVariables: {}, environmentVariables: {},
decisions: { router: new Map(), condition: new Map() }, decisions: { router: new Map(), condition: new Map() },
loopIterations: new Map(), loopExecutions: new Map(),
loopItems: new Map(),
executedBlocks: new Set(), executedBlocks: new Set(),
activeExecutionPath: new Set(), activeExecutionPath: new Set(),
completedLoops: new Set(), completedLoops: new Set(),

View File

@@ -72,7 +72,6 @@ export class WorkflowBlockHandler implements BlockHandler {
throw new Error(`Child workflow ${workflowId} not found`) throw new Error(`Child workflow ${workflowId} not found`)
} }
// Get workflow metadata for logging
const { workflows } = useWorkflowRegistry.getState() const { workflows } = useWorkflowRegistry.getState()
const workflowMetadata = workflows[workflowId] const workflowMetadata = workflows[workflowId]
const childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow' const childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow'
@@ -204,8 +203,6 @@ export class WorkflowBlockHandler implements BlockHandler {
logger.info( logger.info(
`Loaded ${Object.keys(workflowVariables).length} variables for child workflow: ${workflowId}` `Loaded ${Object.keys(workflowVariables).length} variables for child workflow: ${workflowId}`
) )
} else {
logger.debug(`No workflow variables found for child workflow: ${workflowId}`)
} }
return { return {

View File

@@ -3,4 +3,4 @@
* Exports the DAG executor as the default executor * Exports the DAG executor as the default executor
*/ */
export { DAGExecutor as Executor } from './execution/executor' export { DAGExecutor as Executor } from '@/executor/execution/executor'

View File

@@ -1,5 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/consts' import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/consts'
import type { DAG } from '@/executor/dag/builder'
import type { LoopScope } from '@/executor/execution/state'
import type { BlockStateController } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types' import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { LoopConfigWithNodes } from '@/executor/types/loop' import type { LoopConfigWithNodes } from '@/executor/types/loop'
import { import {
@@ -7,10 +10,8 @@ import {
buildSentinelStartId, buildSentinelStartId,
extractBaseBlockId, extractBaseBlockId,
} from '@/executor/utils/subflow-utils' } from '@/executor/utils/subflow-utils'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedLoop } from '@/serializer/types' import type { SerializedLoop } from '@/serializer/types'
import type { DAG } from '../dag/builder'
import type { ExecutionState, LoopScope } from '../execution/state'
import type { VariableResolver } from '../variables/resolver'
const logger = createLogger('LoopOrchestrator') const logger = createLogger('LoopOrchestrator')
@@ -27,7 +28,7 @@ export interface LoopContinuationResult {
export class LoopOrchestrator { export class LoopOrchestrator {
constructor( constructor(
private dag: DAG, private dag: DAG,
private state: ExecutionState, private state: BlockStateController,
private resolver: VariableResolver private resolver: VariableResolver
) {} ) {}
@@ -44,13 +45,11 @@ export class LoopOrchestrator {
} }
const loopType = loopConfig.loopType const loopType = loopConfig.loopType
logger.debug('Initializing loop scope', { loopId, loopType })
switch (loopType) { switch (loopType) {
case 'for': case 'for':
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
scope.condition = buildLoopIndexCondition(scope.maxIterations) scope.condition = buildLoopIndexCondition(scope.maxIterations)
logger.debug('For loop initialized', { loopId, maxIterations: scope.maxIterations })
break break
case 'forEach': { case 'forEach': {
@@ -59,13 +58,11 @@ export class LoopOrchestrator {
scope.maxIterations = items.length scope.maxIterations = items.length
scope.item = items[0] scope.item = items[0]
scope.condition = buildLoopIndexCondition(scope.maxIterations) scope.condition = buildLoopIndexCondition(scope.maxIterations)
logger.debug('ForEach loop initialized', { loopId, itemCount: items.length })
break break
} }
case 'while': case 'while':
scope.condition = loopConfig.whileCondition scope.condition = loopConfig.whileCondition
logger.debug('While loop initialized', { loopId, condition: scope.condition })
break break
case 'doWhile': case 'doWhile':
@@ -76,14 +73,16 @@ export class LoopOrchestrator {
scope.condition = buildLoopIndexCondition(scope.maxIterations) scope.condition = buildLoopIndexCondition(scope.maxIterations)
} }
scope.skipFirstConditionCheck = true scope.skipFirstConditionCheck = true
logger.debug('DoWhile loop initialized', { loopId, condition: scope.condition })
break break
default: default:
throw new Error(`Unknown loop type: ${loopType}`) throw new Error(`Unknown loop type: ${loopType}`)
} }
this.state.setLoopScope(loopId, scope) if (!ctx.loopExecutions) {
ctx.loopExecutions = new Map()
}
ctx.loopExecutions.set(loopId, scope)
return scope return scope
} }
@@ -93,7 +92,7 @@ export class LoopOrchestrator {
nodeId: string, nodeId: string,
output: NormalizedBlockOutput output: NormalizedBlockOutput
): void { ): void {
const scope = this.state.getLoopScope(loopId) const scope = ctx.loopExecutions?.get(loopId)
if (!scope) { if (!scope) {
logger.warn('Loop scope not found for node output storage', { loopId, nodeId }) logger.warn('Loop scope not found for node output storage', { loopId, nodeId })
return return
@@ -101,16 +100,10 @@ export class LoopOrchestrator {
const baseId = extractBaseBlockId(nodeId) const baseId = extractBaseBlockId(nodeId)
scope.currentIterationOutputs.set(baseId, output) scope.currentIterationOutputs.set(baseId, output)
logger.debug('Stored loop node output', {
loopId,
nodeId: baseId,
iteration: scope.iteration,
outputsCount: scope.currentIterationOutputs.size,
})
} }
evaluateLoopContinuation(ctx: ExecutionContext, loopId: string): LoopContinuationResult { evaluateLoopContinuation(ctx: ExecutionContext, loopId: string): LoopContinuationResult {
const scope = this.state.getLoopScope(loopId) const scope = ctx.loopExecutions?.get(loopId)
if (!scope) { if (!scope) {
logger.error('Loop scope not found during continuation evaluation', { loopId }) logger.error('Loop scope not found during continuation evaluation', { loopId })
return { return {
@@ -120,6 +113,12 @@ export class LoopOrchestrator {
} }
} }
// Check for cancellation
if (ctx.isCancelled) {
logger.info('Loop execution cancelled', { loopId, iteration: scope.iteration })
return this.createExitResult(ctx, loopId, scope)
}
const iterationResults: NormalizedBlockOutput[] = [] const iterationResults: NormalizedBlockOutput[] = []
for (const blockOutput of scope.currentIterationOutputs.values()) { for (const blockOutput of scope.currentIterationOutputs.values()) {
iterationResults.push(blockOutput) iterationResults.push(blockOutput)
@@ -127,11 +126,6 @@ export class LoopOrchestrator {
if (iterationResults.length > 0) { if (iterationResults.length > 0) {
scope.allIterationOutputs.push(iterationResults) scope.allIterationOutputs.push(iterationResults)
logger.debug('Collected iteration results', {
loopId,
iteration: scope.iteration,
resultsCount: iterationResults.length,
})
} }
scope.currentIterationOutputs.clear() scope.currentIterationOutputs.clear()
@@ -140,25 +134,16 @@ export class LoopOrchestrator {
const shouldSkipFirstCheck = scope.skipFirstConditionCheck && isFirstIteration const shouldSkipFirstCheck = scope.skipFirstConditionCheck && isFirstIteration
if (!shouldSkipFirstCheck) { if (!shouldSkipFirstCheck) {
if (!this.evaluateCondition(ctx, scope, scope.iteration + 1)) { if (!this.evaluateCondition(ctx, scope, scope.iteration + 1)) {
logger.debug('Loop condition false for next iteration - exiting', {
loopId,
currentIteration: scope.iteration,
nextIteration: scope.iteration + 1,
})
return this.createExitResult(ctx, loopId, scope) return this.createExitResult(ctx, loopId, scope)
} }
} }
scope.iteration++ scope.iteration++
if (scope.items && scope.iteration < scope.items.length) { if (scope.items && scope.iteration < scope.items.length) {
scope.item = scope.items[scope.iteration] scope.item = scope.items[scope.iteration]
} }
logger.debug('Loop will continue', {
loopId,
nextIteration: scope.iteration,
})
return { return {
shouldContinue: true, shouldContinue: true,
shouldExit: false, shouldExit: false,
@@ -173,13 +158,7 @@ export class LoopOrchestrator {
scope: LoopScope scope: LoopScope
): LoopContinuationResult { ): LoopContinuationResult {
const results = scope.allIterationOutputs const results = scope.allIterationOutputs
ctx.blockStates?.set(loopId, { this.state.setBlockOutput(loopId, { results }, DEFAULTS.EXECUTION_TIME)
output: { results },
executed: true,
executionTime: DEFAULTS.EXECUTION_TIME,
})
logger.debug('Loop exiting', { loopId, totalIterations: scope.iteration })
return { return {
shouldContinue: false, shouldContinue: false,
@@ -210,7 +189,7 @@ export class LoopOrchestrator {
return result return result
} }
clearLoopExecutionState(loopId: string, executedBlocks: Set<string>): void { clearLoopExecutionState(loopId: string): void {
const loopConfig = this.dag.loopConfigs.get(loopId) as LoopConfigWithNodes | undefined const loopConfig = this.dag.loopConfigs.get(loopId) as LoopConfigWithNodes | undefined
if (!loopConfig) { if (!loopConfig) {
logger.warn('Loop config not found for state clearing', { loopId }) logger.warn('Loop config not found for state clearing', { loopId })
@@ -221,16 +200,11 @@ export class LoopOrchestrator {
const sentinelEndId = buildSentinelEndId(loopId) const sentinelEndId = buildSentinelEndId(loopId)
const loopNodes = loopConfig.nodes const loopNodes = loopConfig.nodes
executedBlocks.delete(sentinelStartId) this.state.unmarkExecuted(sentinelStartId)
executedBlocks.delete(sentinelEndId) this.state.unmarkExecuted(sentinelEndId)
for (const loopNodeId of loopNodes) { for (const loopNodeId of loopNodes) {
executedBlocks.delete(loopNodeId) this.state.unmarkExecuted(loopNodeId)
} }
logger.debug('Cleared loop execution state', {
loopId,
nodesCleared: loopNodes.length + 2,
})
} }
restoreLoopEdges(loopId: string): void { restoreLoopEdges(loopId: string): void {
@@ -267,15 +241,13 @@ export class LoopOrchestrator {
} }
} }
} }
logger.debug('Restored loop edges', { loopId, edgesRestored: restoredCount })
} }
getLoopScope(loopId: string): LoopScope | undefined { getLoopScope(ctx: ExecutionContext, loopId: string): LoopScope | undefined {
return this.state.getLoopScope(loopId) return ctx.loopExecutions?.get(loopId)
} }
shouldExecuteLoopNode(nodeId: string, loopId: string, context: ExecutionContext): boolean { shouldExecuteLoopNode(_ctx: ExecutionContext, _nodeId: string, _loopId: string): boolean {
return true return true
} }
@@ -301,29 +273,45 @@ export class LoopOrchestrator {
try { try {
const referencePattern = /<([^>]+)>/g const referencePattern = /<([^>]+)>/g
let evaluatedCondition = condition let evaluatedCondition = condition
const replacements: Record<string, string> = {}
logger.info('Evaluating loop condition', {
originalCondition: condition,
iteration: scope.iteration,
workflowVariables: ctx.workflowVariables,
})
evaluatedCondition = evaluatedCondition.replace(referencePattern, (match) => { evaluatedCondition = evaluatedCondition.replace(referencePattern, (match) => {
const resolved = this.resolver.resolveSingleReference(ctx, '', match, scope) const resolved = this.resolver.resolveSingleReference(ctx, '', match, scope)
logger.info('Resolved variable reference in loop condition', {
reference: match,
resolvedValue: resolved,
resolvedType: typeof resolved,
})
if (resolved !== undefined) { if (resolved !== undefined) {
// For booleans and numbers, return as-is (no quotes)
if (typeof resolved === 'boolean' || typeof resolved === 'number') {
return String(resolved)
}
// For strings that represent booleans, return without quotes
if (typeof resolved === 'string') { if (typeof resolved === 'string') {
replacements[match] = `"${resolved}"` const lower = resolved.toLowerCase().trim()
if (lower === 'true' || lower === 'false') {
return lower
}
return `"${resolved}"` return `"${resolved}"`
} }
replacements[match] = String(resolved) // For other types, stringify them
return String(resolved) return JSON.stringify(resolved)
} }
return match return match
}) })
const result = Boolean(new Function(`return (${evaluatedCondition})`)()) const result = Boolean(new Function(`return (${evaluatedCondition})`)())
logger.debug('Evaluated loop condition', { logger.info('Loop condition evaluation result', {
condition, originalCondition: condition,
replacements,
evaluatedCondition, evaluatedCondition,
result, result,
iteration: scope.iteration,
}) })
return result return result
@@ -345,13 +333,19 @@ export class LoopOrchestrator {
if (typeof items === 'string') { if (typeof items === 'string') {
if (items.startsWith('<') && items.endsWith('>')) { if (items.startsWith('<') && items.endsWith('>')) {
const resolved = this.resolver.resolveSingleReference(ctx, '', items) const resolved = this.resolver.resolveSingleReference(ctx, '', items)
return Array.isArray(resolved) ? resolved : [] if (Array.isArray(resolved)) {
return resolved
}
return []
} }
try { try {
const normalized = items.replace(/'/g, '"') const normalized = items.replace(/'/g, '"')
const parsed = JSON.parse(normalized) const parsed = JSON.parse(normalized)
return Array.isArray(parsed) ? parsed : [] if (Array.isArray(parsed)) {
return parsed
}
return []
} catch (error) { } catch (error) {
logger.error('Failed to parse forEach items', { items, error }) logger.error('Failed to parse forEach items', { items, error })
return [] return []

View File

@@ -1,12 +1,12 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { EDGE } from '@/executor/consts' import { EDGE } from '@/executor/consts'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { BlockExecutor } from '@/executor/execution/block-executor'
import type { BlockStateController } from '@/executor/execution/types'
import type { LoopOrchestrator } from '@/executor/orchestrators/loop'
import type { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types' import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils' import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { DAG, DAGNode } from '../dag/builder'
import type { BlockExecutor } from '../execution/block-executor'
import type { ExecutionState } from '../execution/state'
import type { LoopOrchestrator } from './loop'
import type { ParallelOrchestrator } from './parallel'
const logger = createLogger('NodeExecutionOrchestrator') const logger = createLogger('NodeExecutionOrchestrator')
@@ -19,20 +19,19 @@ export interface NodeExecutionResult {
export class NodeExecutionOrchestrator { export class NodeExecutionOrchestrator {
constructor( constructor(
private dag: DAG, private dag: DAG,
private state: ExecutionState, private state: BlockStateController,
private blockExecutor: BlockExecutor, private blockExecutor: BlockExecutor,
private loopOrchestrator: LoopOrchestrator, private loopOrchestrator: LoopOrchestrator,
private parallelOrchestrator: ParallelOrchestrator private parallelOrchestrator: ParallelOrchestrator
) {} ) {}
async executeNode(nodeId: string, context: any): Promise<NodeExecutionResult> { async executeNode(ctx: ExecutionContext, nodeId: string): Promise<NodeExecutionResult> {
const node = this.dag.nodes.get(nodeId) const node = this.dag.nodes.get(nodeId)
if (!node) { if (!node) {
throw new Error(`Node not found in DAG: ${nodeId}`) throw new Error(`Node not found in DAG: ${nodeId}`)
} }
if (this.state.hasExecuted(nodeId)) { if (this.state.hasExecuted(nodeId)) {
logger.debug('Node already executed, skipping', { nodeId })
const output = this.state.getBlockOutput(nodeId) || {} const output = this.state.getBlockOutput(nodeId) || {}
return { return {
nodeId, nodeId,
@@ -42,13 +41,11 @@ export class NodeExecutionOrchestrator {
} }
const loopId = node.metadata.loopId const loopId = node.metadata.loopId
if (loopId && !this.loopOrchestrator.getLoopScope(loopId)) { if (loopId && !this.loopOrchestrator.getLoopScope(ctx, loopId)) {
logger.debug('Initializing loop scope before first execution', { loopId, nodeId }) this.loopOrchestrator.initializeLoopScope(ctx, loopId)
this.loopOrchestrator.initializeLoopScope(context, loopId)
} }
if (loopId && !this.loopOrchestrator.shouldExecuteLoopNode(nodeId, loopId, context)) { if (loopId && !this.loopOrchestrator.shouldExecuteLoopNode(ctx, nodeId, loopId)) {
logger.debug('Loop node should not execute', { nodeId, loopId })
return { return {
nodeId, nodeId,
output: {}, output: {},
@@ -57,12 +54,7 @@ export class NodeExecutionOrchestrator {
} }
if (node.metadata.isSentinel) { if (node.metadata.isSentinel) {
logger.debug('Executing sentinel node', { const output = this.handleSentinel(ctx, node)
nodeId,
sentinelType: node.metadata.sentinelType,
loopId,
})
const output = this.handleSentinel(node, context)
const isFinalOutput = node.outgoingEdges.size === 0 const isFinalOutput = node.outgoingEdges.size === 0
return { return {
nodeId, nodeId,
@@ -71,8 +63,7 @@ export class NodeExecutionOrchestrator {
} }
} }
logger.debug('Executing node', { nodeId, blockType: node.block.metadata?.id }) const output = await this.blockExecutor.execute(ctx, node, node.block)
const output = await this.blockExecutor.execute(context, node, node.block)
const isFinalOutput = node.outgoingEdges.size === 0 const isFinalOutput = node.outgoingEdges.size === 0
return { return {
nodeId, nodeId,
@@ -81,28 +72,22 @@ export class NodeExecutionOrchestrator {
} }
} }
private handleSentinel(node: DAGNode, context: any): NormalizedBlockOutput { private handleSentinel(ctx: ExecutionContext, node: DAGNode): NormalizedBlockOutput {
const sentinelType = node.metadata.sentinelType const sentinelType = node.metadata.sentinelType
const loopId = node.metadata.loopId const loopId = node.metadata.loopId
if (sentinelType === 'start') {
logger.debug('Sentinel start - loop entry', { nodeId: node.id, loopId }) switch (sentinelType) {
case 'start': {
return { sentinelStart: true } return { sentinelStart: true }
} }
if (sentinelType === 'end') { case 'end': {
logger.debug('Sentinel end - evaluating loop continuation', { nodeId: node.id, loopId })
if (!loopId) { if (!loopId) {
logger.warn('Sentinel end called without loopId') logger.warn('Sentinel end called without loopId')
return { shouldExit: true, selectedRoute: EDGE.LOOP_EXIT } return { shouldExit: true, selectedRoute: EDGE.LOOP_EXIT }
} }
const continuationResult = this.loopOrchestrator.evaluateLoopContinuation(context, loopId) const continuationResult = this.loopOrchestrator.evaluateLoopContinuation(ctx, loopId)
logger.debug('Loop continuation evaluated', {
loopId,
shouldContinue: continuationResult.shouldContinue,
shouldExit: continuationResult.shouldExit,
iteration: continuationResult.currentIteration,
})
if (continuationResult.shouldContinue) { if (continuationResult.shouldContinue) {
return { return {
@@ -112,6 +97,7 @@ export class NodeExecutionOrchestrator {
loopIteration: continuationResult.currentIteration, loopIteration: continuationResult.currentIteration,
} }
} }
return { return {
results: continuationResult.aggregatedResults || [], results: continuationResult.aggregatedResults || [],
shouldContinue: false, shouldContinue: false,
@@ -120,14 +106,17 @@ export class NodeExecutionOrchestrator {
totalIterations: continuationResult.aggregatedResults?.length || 0, totalIterations: continuationResult.aggregatedResults?.length || 0,
} }
} }
default:
logger.warn('Unknown sentinel type', { sentinelType }) logger.warn('Unknown sentinel type', { sentinelType })
return {} return {}
} }
}
async handleNodeCompletion( async handleNodeCompletion(
ctx: ExecutionContext,
nodeId: string, nodeId: string,
output: NormalizedBlockOutput, output: NormalizedBlockOutput
context: any
): Promise<void> { ): Promise<void> {
const node = this.dag.nodes.get(nodeId) const node = this.dag.nodes.get(nodeId)
if (!node) { if (!node) {
@@ -135,74 +124,70 @@ export class NodeExecutionOrchestrator {
return return
} }
logger.debug('Handling node completion', {
nodeId: node.id,
hasLoopId: !!node.metadata.loopId,
isParallelBranch: !!node.metadata.isParallelBranch,
isSentinel: !!node.metadata.isSentinel,
})
const loopId = node.metadata.loopId const loopId = node.metadata.loopId
const isParallelBranch = node.metadata.isParallelBranch const isParallelBranch = node.metadata.isParallelBranch
const isSentinel = node.metadata.isSentinel const isSentinel = node.metadata.isSentinel
if (isSentinel) { if (isSentinel) {
logger.debug('Handling sentinel node', { nodeId: node.id, loopId }) this.handleRegularNodeCompletion(ctx, node, output)
this.handleRegularNodeCompletion(node, output, context)
} else if (loopId) { } else if (loopId) {
logger.debug('Handling loop node', { nodeId: node.id, loopId }) this.handleLoopNodeCompletion(ctx, node, output, loopId)
this.handleLoopNodeCompletion(node, output, loopId, context)
} else if (isParallelBranch) { } else if (isParallelBranch) {
const parallelId = this.findParallelIdForNode(node.id) const parallelId = this.findParallelIdForNode(node.id)
if (parallelId) { if (parallelId) {
logger.debug('Handling parallel node', { nodeId: node.id, parallelId }) this.handleParallelNodeCompletion(ctx, node, output, parallelId)
this.handleParallelNodeCompletion(node, output, parallelId)
} else { } else {
this.handleRegularNodeCompletion(node, output, context) this.handleRegularNodeCompletion(ctx, node, output)
} }
} else { } else {
logger.debug('Handling regular node', { nodeId: node.id }) this.handleRegularNodeCompletion(ctx, node, output)
this.handleRegularNodeCompletion(node, output, context)
} }
} }
private handleLoopNodeCompletion( private handleLoopNodeCompletion(
ctx: ExecutionContext,
node: DAGNode, node: DAGNode,
output: NormalizedBlockOutput, output: NormalizedBlockOutput,
loopId: string, loopId: string
context: ExecutionContext
): void { ): void {
this.loopOrchestrator.storeLoopNodeOutput(context, loopId, node.id, output) this.loopOrchestrator.storeLoopNodeOutput(ctx, loopId, node.id, output)
this.state.setBlockOutput(node.id, output) this.state.setBlockOutput(node.id, output)
} }
private handleParallelNodeCompletion( private handleParallelNodeCompletion(
ctx: ExecutionContext,
node: DAGNode, node: DAGNode,
output: NormalizedBlockOutput, output: NormalizedBlockOutput,
parallelId: string parallelId: string
): void { ): void {
const scope = this.parallelOrchestrator.getParallelScope(parallelId) const scope = this.parallelOrchestrator.getParallelScope(ctx, parallelId)
if (!scope) { if (!scope) {
const totalBranches = node.metadata.branchTotal || 1 const totalBranches = node.metadata.branchTotal || 1
const parallelConfig = this.dag.parallelConfigs.get(parallelId) const parallelConfig = this.dag.parallelConfigs.get(parallelId)
const nodesInParallel = (parallelConfig as any)?.nodes?.length || 1 const nodesInParallel = (parallelConfig as any)?.nodes?.length || 1
this.parallelOrchestrator.initializeParallelScope(parallelId, totalBranches, nodesInParallel) this.parallelOrchestrator.initializeParallelScope(
ctx,
parallelId,
totalBranches,
nodesInParallel
)
} }
const allComplete = this.parallelOrchestrator.handleParallelBranchCompletion( const allComplete = this.parallelOrchestrator.handleParallelBranchCompletion(
ctx,
parallelId, parallelId,
node.id, node.id,
output output
) )
if (allComplete) { if (allComplete) {
this.parallelOrchestrator.aggregateParallelResults(parallelId) this.parallelOrchestrator.aggregateParallelResults(ctx, parallelId)
} }
this.state.setBlockOutput(node.id, output) this.state.setBlockOutput(node.id, output)
} }
private handleRegularNodeCompletion( private handleRegularNodeCompletion(
ctx: ExecutionContext,
node: DAGNode, node: DAGNode,
output: NormalizedBlockOutput, output: NormalizedBlockOutput
context: any
): void { ): void {
this.state.setBlockOutput(node.id, output) this.state.setBlockOutput(node.id, output)
@@ -213,8 +198,7 @@ export class NodeExecutionOrchestrator {
) { ) {
const loopId = node.metadata.loopId const loopId = node.metadata.loopId
if (loopId) { if (loopId) {
logger.debug('Preparing loop for next iteration', { loopId }) this.loopOrchestrator.clearLoopExecutionState(loopId)
this.loopOrchestrator.clearLoopExecutionState(loopId, this.state.executedBlocks)
this.loopOrchestrator.restoreLoopEdges(loopId) this.loopOrchestrator.restoreLoopEdges(loopId)
} }
} }

View File

@@ -1,5 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import type { NormalizedBlockOutput } from '@/executor/types' import type { DAG } from '@/executor/dag/builder'
import type { ParallelScope } from '@/executor/execution/state'
import type { BlockStateWriter } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { ParallelConfigWithNodes } from '@/executor/types/parallel' import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
import { import {
calculateBranchCount, calculateBranchCount,
@@ -8,8 +11,6 @@ import {
parseDistributionItems, parseDistributionItems,
} from '@/executor/utils/subflow-utils' } from '@/executor/utils/subflow-utils'
import type { SerializedParallel } from '@/serializer/types' import type { SerializedParallel } from '@/serializer/types'
import type { DAG } from '../dag/builder'
import type { ExecutionState, ParallelScope } from '../execution/state'
const logger = createLogger('ParallelOrchestrator') const logger = createLogger('ParallelOrchestrator')
@@ -30,10 +31,11 @@ export interface ParallelAggregationResult {
export class ParallelOrchestrator { export class ParallelOrchestrator {
constructor( constructor(
private dag: DAG, private dag: DAG,
private state: ExecutionState private state: BlockStateWriter
) {} ) {}
initializeParallelScope( initializeParallelScope(
ctx: ExecutionContext,
parallelId: string, parallelId: string,
totalBranches: number, totalBranches: number,
terminalNodesCount = 1 terminalNodesCount = 1
@@ -45,22 +47,20 @@ export class ParallelOrchestrator {
completedCount: 0, completedCount: 0,
totalExpectedNodes: totalBranches * terminalNodesCount, totalExpectedNodes: totalBranches * terminalNodesCount,
} }
this.state.setParallelScope(parallelId, scope) if (!ctx.parallelExecutions) {
logger.debug('Initialized parallel scope', { ctx.parallelExecutions = new Map()
parallelId, }
totalBranches, ctx.parallelExecutions.set(parallelId, scope)
terminalNodesCount,
totalExpectedNodes: scope.totalExpectedNodes,
})
return scope return scope
} }
handleParallelBranchCompletion( handleParallelBranchCompletion(
ctx: ExecutionContext,
parallelId: string, parallelId: string,
nodeId: string, nodeId: string,
output: NormalizedBlockOutput output: NormalizedBlockOutput
): boolean { ): boolean {
const scope = this.state.getParallelScope(parallelId) const scope = ctx.parallelExecutions?.get(parallelId)
if (!scope) { if (!scope) {
logger.warn('Parallel scope not found for branch completion', { parallelId, nodeId }) logger.warn('Parallel scope not found for branch completion', { parallelId, nodeId })
return false return false
@@ -77,27 +77,13 @@ export class ParallelOrchestrator {
} }
scope.branchOutputs.get(branchIndex)!.push(output) scope.branchOutputs.get(branchIndex)!.push(output)
scope.completedCount++ scope.completedCount++
logger.debug('Recorded parallel branch output', {
parallelId,
branchIndex,
nodeId,
completedCount: scope.completedCount,
totalExpected: scope.totalExpectedNodes,
})
const allComplete = scope.completedCount >= scope.totalExpectedNodes const allComplete = scope.completedCount >= scope.totalExpectedNodes
if (allComplete) {
logger.debug('All parallel branches completed', {
parallelId,
totalBranches: scope.totalBranches,
completedNodes: scope.completedCount,
})
}
return allComplete return allComplete
} }
aggregateParallelResults(parallelId: string): ParallelAggregationResult { aggregateParallelResults(ctx: ExecutionContext, parallelId: string): ParallelAggregationResult {
const scope = this.state.getParallelScope(parallelId) const scope = ctx.parallelExecutions?.get(parallelId)
if (!scope) { if (!scope) {
logger.error('Parallel scope not found for aggregation', { parallelId }) logger.error('Parallel scope not found for aggregation', { parallelId })
return { allBranchesComplete: false } return { allBranchesComplete: false }
@@ -111,12 +97,6 @@ export class ParallelOrchestrator {
this.state.setBlockOutput(parallelId, { this.state.setBlockOutput(parallelId, {
results, results,
}) })
logger.debug('Aggregated parallel results', {
parallelId,
totalBranches: scope.totalBranches,
nodesPerBranch: results[0]?.length || 0,
totalOutputs: scope.completedCount,
})
return { return {
allBranchesComplete: true, allBranchesComplete: true,
results, results,
@@ -151,8 +131,8 @@ export class ParallelOrchestrator {
} }
} }
getParallelScope(parallelId: string): ParallelScope | undefined { getParallelScope(ctx: ExecutionContext, parallelId: string): ParallelScope | undefined {
return this.state.getParallelScope(parallelId) return ctx.parallelExecutions?.get(parallelId)
} }
findParallelIdForNode(baseNodeId: string): string | undefined { findParallelIdForNode(baseNodeId: string): string | undefined {

View File

@@ -0,0 +1,73 @@
import { PARALLEL } from '@/executor/consts'
import type { ExecutionContext, LoopPauseScope, ParallelPauseScope } from '@/executor/types'
interface NodeMetadataLike {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
}
export function generatePauseContextId(
baseBlockId: string,
nodeMetadata: NodeMetadataLike,
loopScope?: LoopPauseScope
): string {
let contextId = baseBlockId
if (typeof nodeMetadata.branchIndex === 'number') {
contextId = `${contextId}${PARALLEL.BRANCH.PREFIX}${nodeMetadata.branchIndex}${PARALLEL.BRANCH.SUFFIX}`
}
if (loopScope) {
contextId = `${contextId}_loop${loopScope.iteration}`
}
return contextId
}
export function buildTriggerBlockId(nodeId: string): string {
if (nodeId.includes('__response')) {
return nodeId.replace('__response', '__trigger')
}
if (nodeId.endsWith('_response')) {
return nodeId.replace(/_response$/, '_trigger')
}
return `${nodeId}__trigger`
}
export function mapNodeMetadataToPauseScopes(
ctx: ExecutionContext,
nodeMetadata: NodeMetadataLike
): {
parallelScope?: ParallelPauseScope
loopScope?: LoopPauseScope
} {
let parallelScope: ParallelPauseScope | undefined
let loopScope: LoopPauseScope | undefined
if (nodeMetadata.parallelId && typeof nodeMetadata.branchIndex === 'number') {
parallelScope = {
parallelId: nodeMetadata.parallelId,
branchIndex: nodeMetadata.branchIndex,
branchTotal: nodeMetadata.branchTotal,
}
}
if (nodeMetadata.loopId) {
const loopExecution = ctx.loopExecutions?.get(nodeMetadata.loopId)
const iteration = loopExecution?.iteration ?? 0
loopScope = {
loopId: nodeMetadata.loopId,
iteration,
}
}
return {
parallelScope,
loopScope,
}
}

View File

@@ -2,9 +2,6 @@ import type { TraceSpan } from '@/lib/logs/types'
import type { BlockOutput } from '@/blocks/types' import type { BlockOutput } from '@/blocks/types'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
/**
* User-facing file object with simplified interface
*/
export interface UserFile { export interface UserFile {
id: string id: string
name: string name: string
@@ -15,14 +12,62 @@ export interface UserFile {
context?: string context?: string
} }
/** export interface ParallelPauseScope {
* Standardized block output format that ensures compatibility with the execution engine. parallelId: string
*/ branchIndex: number
branchTotal?: number
}
export interface LoopPauseScope {
loopId: string
iteration: number
}
export interface PauseMetadata {
contextId: string
blockId: string
response: any
timestamp: string
parallelScope?: ParallelPauseScope
loopScope?: LoopPauseScope
resumeLinks?: {
apiUrl: string
uiUrl: string
contextId: string
executionId: string
workflowId: string
}
}
export type ResumeStatus = 'paused' | 'resumed' | 'failed' | 'queued' | 'resuming'
export interface PausePoint {
contextId: string
blockId?: string
response: any
registeredAt: string
resumeStatus: ResumeStatus
snapshotReady: boolean
parallelScope?: ParallelPauseScope
loopScope?: LoopPauseScope
resumeLinks?: {
apiUrl: string
uiUrl: string
contextId: string
executionId: string
workflowId: string
}
}
export interface SerializedSnapshot {
snapshot: string
triggerIds: string[]
}
export interface NormalizedBlockOutput { export interface NormalizedBlockOutput {
[key: string]: any [key: string]: any
// Content fields content?: string
content?: string // Text content from LLM responses model?: string
model?: string // Model identifier used for generation
tokens?: { tokens?: {
prompt?: number prompt?: number
completion?: number completion?: number
@@ -32,131 +77,122 @@ export interface NormalizedBlockOutput {
list: any[] list: any[]
count: number count: number
} }
// File fields files?: UserFile[]
files?: UserFile[] // Binary files/attachments from this block
// Path selection fields
selectedPath?: { selectedPath?: {
blockId: string blockId: string
blockType?: string blockType?: string
blockTitle?: string blockTitle?: string
} }
selectedConditionId?: string // ID of selected condition selectedConditionId?: string
conditionResult?: boolean // Whether condition evaluated to true conditionResult?: boolean
// Generic result fields result?: any
result?: any // Generic result value stdout?: string
stdout?: string // Standard output from function execution executionTime?: number
executionTime?: number // Time taken to execute data?: any
// API response fields status?: number
data?: any // Response data from API calls headers?: Record<string, string>
status?: number // HTTP status code error?: string
headers?: Record<string, string> // HTTP headers
// Error handling
error?: string // Error message if block execution failed
// Child workflow introspection (for workflow blocks)
childTraceSpans?: TraceSpan[] childTraceSpans?: TraceSpan[]
childWorkflowName?: string childWorkflowName?: string
_pauseMetadata?: PauseMetadata
} }
/**
* Execution log entry for a single block.
*/
export interface BlockLog { export interface BlockLog {
blockId: string // Unique identifier of the executed block blockId: string
blockName?: string // Display name of the block blockName?: string
blockType?: string // Type of the block (agent, router, etc.) blockType?: string
startedAt: string // ISO timestamp when execution started startedAt: string
endedAt: string // ISO timestamp when execution completed endedAt: string
durationMs: number // Duration of execution in milliseconds durationMs: number
success: boolean // Whether execution completed successfully success: boolean
output?: any // Output data from successful execution output?: any
input?: any // Input data for the block execution input?: any
error?: string // Error message if execution failed error?: string
loopId?: string // Loop ID if this block is part of a loop loopId?: string
parallelId?: string // Parallel ID if this block is part of a parallel parallelId?: string
iterationIndex?: number // Iteration number for loop/parallel blocks iterationIndex?: number
} }
/**
* Timing metadata for workflow execution.
*/
export interface ExecutionMetadata { export interface ExecutionMetadata {
startTime?: string // ISO timestamp when workflow execution started requestId?: string
endTime?: string // ISO timestamp when workflow execution completed workflowId?: string
duration: number // Duration of workflow execution in milliseconds workspaceId?: string
pendingBlocks?: string[] // List of block IDs that are pending execution startTime?: string
isDebugSession?: boolean // Whether the workflow is running in debug mode endTime?: string
context?: ExecutionContext // Runtime context for the workflow duration: number
workflowConnections?: Array<{ source: string; target: string }> // Connections between workflow blocks pendingBlocks?: string[]
isDebugSession?: boolean
context?: ExecutionContext
workflowConnections?: Array<{ source: string; target: string }>
status?: 'running' | 'paused' | 'completed'
pausePoints?: string[]
resumeChain?: {
parentExecutionId?: string
depth: number
}
userId?: string
executionId?: string
triggerType?: string
triggerBlockId?: string
useDraftState?: boolean
resumeFromSnapshot?: boolean
} }
/**
* Current state of a block during workflow execution.
*/
export interface BlockState { export interface BlockState {
output: NormalizedBlockOutput // Current output data from the block output: NormalizedBlockOutput
executed: boolean // Whether the block has been executed executed: boolean
executionTime: number // Time taken to execute in milliseconds executionTime: number
} }
/**
* Runtime context for workflow execution.
*/
export interface ExecutionContext { export interface ExecutionContext {
workflowId: string // Unique identifier for this workflow execution workflowId: string
workspaceId?: string // Workspace ID for file storage scoping workspaceId?: string
executionId?: string // Unique execution ID for file storage scoping executionId?: string
userId?: string // User ID for file storage attribution userId?: string
// Whether this execution is running against deployed state (API/webhook/schedule/chat)
// Manual executions in the builder should leave this undefined/false
isDeployedContext?: boolean isDeployedContext?: boolean
// CONSOLIDATED STATE - Single source of truth for execution state blockStates: ReadonlyMap<string, BlockState>
// Uses shared references with ExecutionState class executedBlocks: ReadonlySet<string>
blockStates: Map<string, BlockState>
executedBlocks: Set<string> // Set of block IDs that have been executed
blockLogs: BlockLog[] // Chronological log of block executions blockLogs: BlockLog[]
metadata: ExecutionMetadata // Timing metadata for the execution metadata: ExecutionMetadata
environmentVariables: Record<string, string> // Environment variables available during execution environmentVariables: Record<string, string>
workflowVariables?: Record<string, any> // Workflow variables available during execution workflowVariables?: Record<string, any>
// Routing decisions for path determination
decisions: { decisions: {
router: Map<string, string> // Router block ID -> Target block ID router: Map<string, string>
condition: Map<string, string> // Condition block ID -> Selected condition ID condition: Map<string, string>
} }
loopIterations: Map<string, number> // Tracks current iteration count for each loop completedLoops: Set<string>
loopItems: Map<string, any> // Tracks current item for forEach loops and parallel distribution
completedLoops: Set<string> // Tracks which loops have completed all iterations loopExecutions?: Map<
string,
{
iteration: number
currentIterationOutputs: Map<string, any>
allIterationOutputs: any[][]
maxIterations?: number
item?: any
items?: any[]
condition?: string
skipFirstConditionCheck?: boolean
loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
}
>
// Parallel execution tracking
parallelExecutions?: Map< parallelExecutions?: Map<
string, string,
{ {
parallelCount: number parallelId: string
distributionItems: any[] | Record<string, any> | null totalBranches: number
completedExecutions: number branchOutputs: Map<number, any[]>
executionResults: Map<string, any> completedCount: number
activeIterations: Set<number> totalExpectedNodes: number
currentIteration: number
parallelType?: 'count' | 'collection' parallelType?: 'count' | 'collection'
} }
> >
// Loop execution tracking
loopExecutions?: Map<
string,
{
maxIterations: number
loopType: 'for' | 'forEach'
forEachItems?: any[] | Record<string, any> | null
executionResults: Map<string, any> // iteration_0, iteration_1, etc.
currentIteration: number
}
>
// Mapping for virtual parallel block IDs to their original blocks
parallelBlockMapping?: Map< parallelBlockMapping?: Map<
string, string,
{ {
@@ -166,19 +202,16 @@ export interface ExecutionContext {
} }
> >
// Current virtual block being executed (for parallel iterations)
currentVirtualBlockId?: string currentVirtualBlockId?: string
activeExecutionPath: Set<string> // Set of block IDs in the current execution path activeExecutionPath: Set<string>
workflow?: SerializedWorkflow // Reference to the workflow being executed workflow?: SerializedWorkflow
// Streaming support and output selection stream?: boolean
stream?: boolean // Whether to use streaming responses when available selectedOutputs?: string[]
selectedOutputs?: string[] // IDs of blocks selected for streaming output edges?: Array<{ source: string; target: string }>
edges?: Array<{ source: string; target: string }> // Workflow edge connections
// New context extensions
onStream?: (streamingExecution: StreamingExecution) => Promise<void> onStream?: (streamingExecution: StreamingExecution) => Promise<void>
onBlockStart?: (blockId: string, blockName: string, blockType: string) => Promise<void> onBlockStart?: (blockId: string, blockName: string, blockType: string) => Promise<void>
onBlockComplete?: ( onBlockComplete?: (
@@ -187,45 +220,34 @@ export interface ExecutionContext {
blockType: string, blockType: string,
output: any output: any
) => Promise<void> ) => Promise<void>
// Cancellation support
isCancelled?: boolean
} }
/**
* Complete result from executing a workflow.
*/
export interface ExecutionResult { export interface ExecutionResult {
success: boolean // Whether the workflow executed successfully success: boolean
output: NormalizedBlockOutput // Final output data from the workflow output: NormalizedBlockOutput
error?: string // Error message if execution failed error?: string
logs?: BlockLog[] // Execution logs for all blocks logs?: BlockLog[]
metadata?: ExecutionMetadata metadata?: ExecutionMetadata
status?: 'completed' | 'paused'
pausePoints?: PausePoint[]
snapshotSeed?: SerializedSnapshot
_streamingMetadata?: { _streamingMetadata?: {
// Internal metadata for streaming execution
loggingSession: any loggingSession: any
processedInput: any processedInput: any
} }
} }
/**
* Streaming execution result combining a readable stream with execution metadata.
* This allows us to stream content to the UI while still capturing all execution logs.
*/
export interface StreamingExecution { export interface StreamingExecution {
stream: ReadableStream // The streaming response for the UI to consume stream: ReadableStream
execution: ExecutionResult & { isStreaming?: boolean } // The complete execution data for logging purposes execution: ExecutionResult & { isStreaming?: boolean }
} }
/**
* Interface for a block executor component.
*/
export interface BlockExecutor { export interface BlockExecutor {
/**
* Determines if this executor can process the given block.
*/
canExecute(block: SerializedBlock): boolean canExecute(block: SerializedBlock): boolean
/**
* Executes the block with the given inputs and context.
*/
execute( execute(
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any>, inputs: Record<string, any>,
@@ -233,17 +255,7 @@ export interface BlockExecutor {
): Promise<BlockOutput> ): Promise<BlockOutput>
} }
/**
* Interface for block handlers that execute specific block types.
* Each handler is responsible for executing a particular type of block.
*/
export interface BlockHandler { export interface BlockHandler {
/**
* Determines if this handler can process the given block.
*
* @param block - Block to check
* @returns True if this handler can process the block
*/
canHandle(block: SerializedBlock): boolean canHandle(block: SerializedBlock): boolean
execute( execute(
@@ -251,39 +263,43 @@ export interface BlockHandler {
block: SerializedBlock, block: SerializedBlock,
inputs: Record<string, any> inputs: Record<string, any>
): Promise<BlockOutput | StreamingExecution> ): Promise<BlockOutput | StreamingExecution>
executeWithNode?: (
ctx: ExecutionContext,
block: SerializedBlock,
inputs: Record<string, any>,
nodeMetadata: {
nodeId: string
loopId?: string
parallelId?: string
branchIndex?: number
branchTotal?: number
}
) => Promise<BlockOutput | StreamingExecution>
} }
/**
* Definition of a tool that can be invoked by blocks.
*
* @template P - Parameter type for the tool
* @template O - Output type from the tool
*/
export interface Tool<P = any, O = Record<string, any>> { export interface Tool<P = any, O = Record<string, any>> {
id: string // Unique identifier for the tool id: string
name: string // Display name of the tool name: string
description: string // Description of what the tool does description: string
version: string // Version string for the tool version: string
// Parameter definitions for the tool
params: { params: {
[key: string]: { [key: string]: {
type: string // Data type of the parameter type: string
required?: boolean // Whether the parameter is required required?: boolean
description?: string // Description of the parameter description?: string
default?: any // Default value if not provided default?: any
} }
} }
// HTTP request configuration for API tools
request?: { request?: {
url?: string | ((params: P) => string) // URL or function to generate URL url?: string | ((params: P) => string)
method?: string // HTTP method to use method?: string
headers?: (params: P) => Record<string, string> // Function to generate request headers headers?: (params: P) => Record<string, string>
body?: (params: P) => Record<string, any> // Function to generate request body body?: (params: P) => Record<string, any>
} }
// Function to transform API response to tool output
transformResponse?: (response: any) => Promise<{ transformResponse?: (response: any) => Promise<{
success: boolean success: boolean
output: O output: O
@@ -291,16 +307,10 @@ export interface Tool<P = any, O = Record<string, any>> {
}> }>
} }
/**
* Registry of available tools indexed by ID.
*/
export interface ToolRegistry { export interface ToolRegistry {
[key: string]: Tool [key: string]: Tool
} }
/**
* Interface for a stream processor that can process a stream based on a response format.
*/
export interface ResponseFormatStreamProcessor { export interface ResponseFormatStreamProcessor {
processStream( processStream(
originalStream: ReadableStream, originalStream: ReadableStream,

View File

@@ -5,14 +5,14 @@ export interface BlockDataCollection {
blockNameMapping: Record<string, string> blockNameMapping: Record<string, string>
} }
export function collectBlockData(context: ExecutionContext): BlockDataCollection { export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
const blockData: Record<string, any> = {} const blockData: Record<string, any> = {}
const blockNameMapping: Record<string, string> = {} const blockNameMapping: Record<string, string> = {}
for (const [id, state] of context.blockStates.entries()) { for (const [id, state] of ctx.blockStates.entries()) {
if (state.output !== undefined) { if (state.output !== undefined) {
blockData[id] = state.output blockData[id] = state.output
const workflowBlock = context.workflow?.blocks?.find((b) => b.id === id) const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
if (workflowBlock?.metadata?.name) { if (workflowBlock?.metadata?.name) {
blockNameMapping[workflowBlock.metadata.name] = id blockNameMapping[workflowBlock.metadata.name] = id
const normalized = workflowBlock.metadata.name.replace(/\s+/g, '').toLowerCase() const normalized = workflowBlock.metadata.name.replace(/\s+/g, '').toLowerCase()

View File

@@ -57,7 +57,6 @@ export class ConnectionUtils {
scopeNodes.includes(conn.source) scopeNodes.includes(conn.source)
) )
// Has external connections if total incoming > internal connections
return incomingConnections.length > internalConnections.length return incomingConnections.length > internalConnections.length
} }
@@ -74,10 +73,9 @@ export class ConnectionUtils {
ConnectionUtils.getInternalConnections(nodeId, scopeNodes, connections).length > 0 ConnectionUtils.getInternalConnections(nodeId, scopeNodes, connections).length > 0
if (hasInternalConnections) { if (hasInternalConnections) {
return false // Has internal connections, not an entry point return false
} }
// Only entry point if it has external connections (not completely unconnected)
return ConnectionUtils.hasExternalConnections(nodeId, scopeNodes, connections) return ConnectionUtils.hasExternalConnections(nodeId, scopeNodes, connections)
} }
} }

View File

@@ -25,7 +25,6 @@ export class FileToolProcessor {
const processedOutput = { ...toolOutput } const processedOutput = { ...toolOutput }
// Process each output that's marked as file or file[]
for (const [outputKey, outputDef] of Object.entries(toolConfig.outputs)) { for (const [outputKey, outputDef] of Object.entries(toolConfig.outputs)) {
if (!FileToolProcessor.isFileOutput(outputDef.type)) { if (!FileToolProcessor.isFileOutput(outputDef.type)) {
continue continue
@@ -101,7 +100,33 @@ export class FileToolProcessor {
context: ExecutionContext context: ExecutionContext
): Promise<UserFile> { ): Promise<UserFile> {
try { try {
if (fileData.url) { let buffer: Buffer | null = null
if (Buffer.isBuffer(fileData.data)) {
buffer = fileData.data
} else if (
fileData.data &&
typeof fileData.data === 'object' &&
'type' in fileData.data &&
'data' in fileData.data
) {
const serializedBuffer = fileData.data as { type: string; data: number[] }
if (serializedBuffer.type === 'Buffer' && Array.isArray(serializedBuffer.data)) {
buffer = Buffer.from(serializedBuffer.data)
} else {
throw new Error(`Invalid serialized buffer format for ${fileData.name}`)
}
} else if (typeof fileData.data === 'string' && fileData.data) {
let base64Data = fileData.data
if (base64Data.includes('-') || base64Data.includes('_')) {
base64Data = base64Data.replace(/-/g, '+').replace(/_/g, '/')
}
buffer = Buffer.from(base64Data, 'base64')
}
if (!buffer && fileData.url) {
const response = await fetch(fileData.url) const response = await fetch(fileData.url)
if (!response.ok) { if (!response.ok) {
@@ -109,8 +134,10 @@ export class FileToolProcessor {
} }
const arrayBuffer = await response.arrayBuffer() const arrayBuffer = await response.arrayBuffer()
const buffer = Buffer.from(arrayBuffer) buffer = Buffer.from(arrayBuffer)
}
if (buffer) {
if (buffer.length === 0) { if (buffer.length === 0) {
throw new Error(`File '${fileData.name}' has zero bytes`) throw new Error(`File '${fileData.name}' has zero bytes`)
} }
@@ -128,6 +155,12 @@ export class FileToolProcessor {
) )
} }
if (!fileData.data) {
throw new Error(
`File data for '${fileData.name}' must have either 'data' (Buffer/base64) or 'url' property`
)
}
return uploadFileFromRawData( return uploadFileFromRawData(
{ {
name: fileData.name, name: fileData.name,

View File

@@ -11,9 +11,6 @@ export function parseJSON<T>(value: unknown, fallback: T): T {
try { try {
return JSON.parse(value.trim()) return JSON.parse(value.trim())
} catch (error) { } catch (error) {
logger.debug('Failed to parse JSON, using fallback', {
error: error instanceof Error ? error.message : String(error),
})
return fallback return fallback
} }
} }

View File

@@ -1,14 +1,14 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { BlockType, REFERENCE } from '@/executor/consts' import { BlockType, REFERENCE } from '@/executor/consts'
import type { ExecutionState, LoopScope } from '@/executor/execution/state'
import type { ExecutionContext } from '@/executor/types' import type { ExecutionContext } from '@/executor/types'
import { BlockResolver } from '@/executor/variables/resolvers/block'
import { EnvResolver } from '@/executor/variables/resolvers/env'
import { LoopResolver } from '@/executor/variables/resolvers/loop'
import { ParallelResolver } from '@/executor/variables/resolvers/parallel'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import { WorkflowResolver } from '@/executor/variables/resolvers/workflow'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types' import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
import type { ExecutionState, LoopScope } from '../execution/state'
import { BlockResolver } from './resolvers/block'
import { EnvResolver } from './resolvers/env'
import { LoopResolver } from './resolvers/loop'
import { ParallelResolver } from './resolvers/parallel'
import type { ResolutionContext, Resolver } from './resolvers/reference'
import { WorkflowResolver } from './resolvers/workflow'
const logger = createLogger('VariableResolver') const logger = createLogger('VariableResolver')
@@ -17,8 +17,8 @@ export class VariableResolver {
private blockResolver: BlockResolver private blockResolver: BlockResolver
constructor( constructor(
private workflow: SerializedWorkflow, workflow: SerializedWorkflow,
private workflowVariables: Record<string, any>, workflowVariables: Record<string, any>,
private state: ExecutionState private state: ExecutionState
) { ) {
this.blockResolver = new BlockResolver(workflow) this.blockResolver = new BlockResolver(workflow)
@@ -93,6 +93,20 @@ export class VariableResolver {
reference: string, reference: string,
loopScope?: LoopScope loopScope?: LoopScope
): any { ): any {
if (typeof reference === 'string') {
const trimmed = reference.trim()
if (/^<[^<>]+>$/.test(trimmed)) {
const resolutionContext: ResolutionContext = {
executionContext: ctx,
executionState: this.state,
currentNodeId,
loopScope,
}
return this.resolveReference(trimmed, resolutionContext)
}
}
return this.resolveValue(ctx, currentNodeId, reference, loopScope) return this.resolveValue(ctx, currentNodeId, reference, loopScope)
} }
@@ -182,10 +196,6 @@ export class VariableResolver {
return result return result
} }
/**
* Resolves template string but without condition-specific formatting.
* Used when resolving condition values that are already parsed from JSON.
*/
private resolveTemplateWithoutConditionFormatting( private resolveTemplateWithoutConditionFormatting(
ctx: ExecutionContext, ctx: ExecutionContext,
currentNodeId: string, currentNodeId: string,
@@ -215,17 +225,13 @@ export class VariableResolver {
return match return match
} }
// Format value for JavaScript evaluation
// Strings need to be quoted, objects need JSON.stringify
if (typeof resolved === 'string') { if (typeof resolved === 'string') {
// Escape backslashes first, then single quotes, then wrap in single quotes
const escaped = resolved.replace(/\\/g, '\\\\').replace(/'/g, "\\'") const escaped = resolved.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
return `'${escaped}'` return `'${escaped}'`
} }
if (typeof resolved === 'object' && resolved !== null) { if (typeof resolved === 'object' && resolved !== null) {
return JSON.stringify(resolved) return JSON.stringify(resolved)
} }
// For numbers, booleans, null, undefined - use as-is
return String(resolved) return String(resolved)
} catch (error) { } catch (error) {
replacementError = error instanceof Error ? error : new Error(String(error)) replacementError = error instanceof Error ? error : new Error(String(error))
@@ -249,11 +255,6 @@ export class VariableResolver {
for (const resolver of this.resolvers) { for (const resolver of this.resolvers) {
if (resolver.canResolve(reference)) { if (resolver.canResolve(reference)) {
const result = resolver.resolve(reference, context) const result = resolver.resolve(reference, context)
logger.debug('Reference resolved', {
reference,
resolver: resolver.constructor.name,
result,
})
return result return result
} }
} }

View File

@@ -1,10 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, SPECIAL_REFERENCE_PREFIXES } from '@/executor/consts' import { isReference, parseReferencePath, SPECIAL_REFERENCE_PREFIXES } from '@/executor/consts'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types' import type { SerializedWorkflow } from '@/serializer/types'
import { normalizeBlockName } from '@/stores/workflows/utils' import { normalizeBlockName } from '@/stores/workflows/utils'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('BlockResolver')
export class BlockResolver implements Resolver { export class BlockResolver implements Resolver {
private blockByNormalizedName: Map<string, string> private blockByNormalizedName: Map<string, string>
@@ -38,25 +35,13 @@ export class BlockResolver implements Resolver {
return undefined return undefined
} }
const [blockName, ...pathParts] = parts const [blockName, ...pathParts] = parts
logger.debug('Resolving block reference', {
reference,
blockName,
pathParts,
})
const blockId = this.findBlockIdByName(blockName) const blockId = this.findBlockIdByName(blockName)
if (!blockId) { if (!blockId) {
logger.debug('Block not found by name, skipping resolution', { blockName, reference })
return undefined return undefined
} }
const output = this.getBlockOutput(blockId, context) const output = this.getBlockOutput(blockId, context)
logger.debug('Block output retrieved', {
blockName,
blockId,
hasOutput: !!output,
outputKeys: output ? Object.keys(output) : [],
})
if (!output) { if (!output) {
throw new Error(`No state found for block "${blockName}"`) throw new Error(`No state found for block "${blockName}"`)
@@ -74,16 +59,11 @@ export class BlockResolver implements Resolver {
) )
} }
logger.debug('Navigated path result', {
blockName,
pathParts,
result,
})
return result return result
} }
private getBlockOutput(blockId: string, context: ResolutionContext): any { private getBlockOutput(blockId: string, context: ResolutionContext): any {
const stateOutput = context.executionState.getBlockOutput(blockId) const stateOutput = context.executionState.getBlockOutput(blockId, context.currentNodeId)
if (stateOutput !== undefined) { if (stateOutput !== undefined) {
return stateOutput return stateOutput
} }
@@ -164,7 +144,7 @@ export class BlockResolver implements Resolver {
return value return value
} }
formatValueForBlock( public formatValueForBlock(
value: any, value: any,
blockType: string | undefined, blockType: string | undefined,
isInTemplateLiteral = false isInTemplateLiteral = false

View File

@@ -1,6 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { extractEnvVarName, isEnvVarReference } from '@/executor/consts' import { extractEnvVarName, isEnvVarReference } from '@/executor/consts'
import type { ResolutionContext, Resolver } from './reference' import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
const logger = createLogger('EnvResolver') const logger = createLogger('EnvResolver')
@@ -14,7 +14,6 @@ export class EnvResolver implements Resolver {
const value = context.executionContext.environmentVariables?.[varName] const value = context.executionContext.environmentVariables?.[varName]
if (value === undefined) { if (value === undefined) {
logger.debug('Environment variable not found, returning original reference', { varName })
return reference return reference
} }
return value return value

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts' import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import { extractBaseBlockId } from '@/executor/utils/subflow-utils' import { extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types' import type { SerializedWorkflow } from '@/serializer/types'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('LoopResolver') const logger = createLogger('LoopResolver')
@@ -34,10 +34,9 @@ export class LoopResolver implements Resolver {
if (!loopScope) { if (!loopScope) {
const loopId = this.findLoopForBlock(context.currentNodeId) const loopId = this.findLoopForBlock(context.currentNodeId)
if (!loopId) { if (!loopId) {
logger.debug('Block not in a loop', { nodeId: context.currentNodeId })
return undefined return undefined
} }
loopScope = context.executionState.getLoopScope(loopId) loopScope = context.executionContext.loopExecutions?.get(loopId)
} }
if (!loopScope) { if (!loopScope) {

View File

@@ -1,8 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts' import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils' import { extractBaseBlockId, extractBranchIndex } from '@/executor/utils/subflow-utils'
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
import type { SerializedWorkflow } from '@/serializer/types' import type { SerializedWorkflow } from '@/serializer/types'
import type { ResolutionContext, Resolver } from './reference'
const logger = createLogger('ParallelResolver') const logger = createLogger('ParallelResolver')
@@ -31,7 +31,6 @@ export class ParallelResolver implements Resolver {
const [_, property] = parts const [_, property] = parts
const parallelId = this.findParallelForBlock(context.currentNodeId) const parallelId = this.findParallelForBlock(context.currentNodeId)
if (!parallelId) { if (!parallelId) {
logger.debug('Block not in a parallel', { nodeId: context.currentNodeId })
return undefined return undefined
} }
@@ -43,7 +42,6 @@ export class ParallelResolver implements Resolver {
const branchIndex = extractBranchIndex(context.currentNodeId) const branchIndex = extractBranchIndex(context.currentNodeId)
if (branchIndex === null) { if (branchIndex === null) {
logger.debug('Node ID does not have branch index', { nodeId: context.currentNodeId })
return undefined return undefined
} }

View File

@@ -1,5 +1,5 @@
import type { ExecutionState, LoopScope } from '@/executor/execution/state'
import type { ExecutionContext } from '@/executor/types' import type { ExecutionContext } from '@/executor/types'
import type { ExecutionState, LoopScope } from '../../execution/state'
export interface ResolutionContext { export interface ResolutionContext {
executionContext: ExecutionContext executionContext: ExecutionContext
executionState: ExecutionState executionState: ExecutionState

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { VariableManager } from '@/lib/variables/variable-manager'
import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts' import { isReference, parseReferencePath, REFERENCE } from '@/executor/consts'
import type { ResolutionContext, Resolver } from './reference' import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
const logger = createLogger('WorkflowResolver') const logger = createLogger('WorkflowResolver')
@@ -28,22 +29,24 @@ export class WorkflowResolver implements Resolver {
const [_, variableName] = parts const [_, variableName] = parts
if (context.executionContext.workflowVariables) { const workflowVars = context.executionContext.workflowVariables || this.workflowVariables
for (const varObj of Object.values(context.executionContext.workflowVariables)) {
for (const varObj of Object.values(workflowVars)) {
const v = varObj as any const v = varObj as any
if (v.name === variableName || v.id === variableName) { if (v && (v.name === variableName || v.id === variableName)) {
const normalizedType = (v.type === 'string' ? 'plain' : v.type) || 'plain'
try {
return VariableManager.resolveForExecution(v.value, normalizedType)
} catch (error) {
logger.warn('Failed to resolve workflow variable, returning raw value', {
variableName,
error: (error as Error).message,
})
return v.value return v.value
} }
} }
} }
for (const varObj of Object.values(this.workflowVariables)) {
const v = varObj as any
if (v.name === variableName || v.id === variableName) {
return v.value
}
}
logger.debug('Workflow variable not found', { variableName })
return undefined return undefined
} }
} }

View File

@@ -4,7 +4,6 @@ import { useSession } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs' import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { getBlock } from '@/blocks' import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useSocket } from '@/contexts/socket-context' import { useSocket } from '@/contexts/socket-context'
import { useUndoRedo } from '@/hooks/use-undo-redo' import { useUndoRedo } from '@/hooks/use-undo-redo'
import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store' import { registerEmitFunctions, useOperationQueue } from '@/stores/operation-queue/store'
@@ -778,9 +777,7 @@ export function useCollaborativeWorkflow() {
// Get outputs based on trigger mode // Get outputs based on trigger mode
const isTriggerMode = triggerMode || false const isTriggerMode = triggerMode || false
const outputs = isTriggerMode const outputs = getBlockOutputs(type, subBlocks, isTriggerMode)
? getBlockOutputs(type, subBlocks, isTriggerMode)
: resolveOutputType(blockConfig.outputs)
const completeBlockData = { const completeBlockData = {
id, id,

View File

@@ -9,7 +9,6 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/custom-tools-persi
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { validateWorkflowState } from '@/lib/workflows/validation' import { validateWorkflowState } from '@/lib/workflows/validation'
import { getAllBlocks } from '@/blocks/registry' import { getAllBlocks } from '@/blocks/registry'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
interface EditWorkflowOperation { interface EditWorkflowOperation {
@@ -134,9 +133,7 @@ function createBlockFromParams(blockId: string, params: any, parentId?: string):
subBlocks[key] = { id: key, type: 'short-input', value: value } subBlocks[key] = { id: key, type: 'short-input', value: value }
}) })
} }
outputs = triggerMode outputs = getBlockOutputs(params.type, subBlocks, triggerMode)
? getBlockOutputs(params.type, subBlocks, triggerMode)
: resolveOutputType(blockConfig.outputs)
} else { } else {
outputs = {} outputs = {}
} }

View File

@@ -42,6 +42,67 @@ export interface ToolCall {
const logger = createLogger('ExecutionLogger') const logger = createLogger('ExecutionLogger')
export class ExecutionLogger implements IExecutionLoggerService { export class ExecutionLogger implements IExecutionLoggerService {
private mergeTraceSpans(existing: TraceSpan[], additional: TraceSpan[]): TraceSpan[] {
// If no existing spans, just return additional
if (!existing || existing.length === 0) return additional
if (!additional || additional.length === 0) return existing
// Find the root "Workflow Execution" span in both arrays
const existingRoot = existing.find((s) => s.name === 'Workflow Execution')
const additionalRoot = additional.find((s) => s.name === 'Workflow Execution')
if (!existingRoot || !additionalRoot) {
// If we can't find both roots, just concatenate (fallback)
return [...existing, ...additional]
}
// Calculate the full duration from original start to resume end
const startTime = existingRoot.startTime
const endTime = additionalRoot.endTime || existingRoot.endTime
const fullDuration =
startTime && endTime
? new Date(endTime).getTime() - new Date(startTime).getTime()
: (existingRoot.duration || 0) + (additionalRoot.duration || 0)
// Merge the children of the workflow execution spans
const mergedRoot = {
...existingRoot,
children: [...(existingRoot.children || []), ...(additionalRoot.children || [])],
endTime,
duration: fullDuration,
}
// Return array with merged root plus any other top-level spans
const otherExisting = existing.filter((s) => s.name !== 'Workflow Execution')
const otherAdditional = additional.filter((s) => s.name !== 'Workflow Execution')
return [mergedRoot, ...otherExisting, ...otherAdditional]
}
private mergeCostModels(
existing: Record<string, any>,
additional: Record<string, any>
): Record<string, any> {
const merged = { ...existing }
for (const [model, costs] of Object.entries(additional)) {
if (merged[model]) {
merged[model] = {
input: (merged[model].input || 0) + (costs.input || 0),
output: (merged[model].output || 0) + (costs.output || 0),
total: (merged[model].total || 0) + (costs.total || 0),
tokens: {
prompt: (merged[model].tokens?.prompt || 0) + (costs.tokens?.prompt || 0),
completion: (merged[model].tokens?.completion || 0) + (costs.tokens?.completion || 0),
total: (merged[model].tokens?.total || 0) + (costs.tokens?.total || 0),
},
}
} else {
merged[model] = costs
}
}
return merged
}
async startWorkflowExecution(params: { async startWorkflowExecution(params: {
workflowId: string workflowId: string
executionId: string executionId: string
@@ -161,6 +222,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
finalOutput: BlockOutputData finalOutput: BlockOutputData
traceSpans?: TraceSpan[] traceSpans?: TraceSpan[]
workflowInput?: any workflowInput?: any
isResume?: boolean // If true, merge with existing data instead of replacing
}): Promise<WorkflowExecutionLog> { }): Promise<WorkflowExecutionLog> {
const { const {
executionId, executionId,
@@ -170,9 +232,21 @@ export class ExecutionLogger implements IExecutionLoggerService {
finalOutput, finalOutput,
traceSpans, traceSpans,
workflowInput, workflowInput,
isResume,
} = params } = params
logger.debug(`Completing workflow execution ${executionId}`) logger.debug(`Completing workflow execution ${executionId}`, { isResume })
// If this is a resume, fetch the existing log to merge data
let existingLog: any = null
if (isResume) {
const [existing] = await db
.select()
.from(workflowExecutionLogs)
.where(eq(workflowExecutionLogs.executionId, executionId))
.limit(1)
existingLog = existing
}
// Determine if workflow failed by checking trace spans for errors // Determine if workflow failed by checking trace spans for errors
const hasErrors = traceSpans?.some((span: any) => { const hasErrors = traceSpans?.some((span: any) => {
@@ -191,29 +265,34 @@ export class ExecutionLogger implements IExecutionLoggerService {
// Extract files from trace spans, final output, and workflow input // Extract files from trace spans, final output, and workflow input
const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput, workflowInput) const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput, workflowInput)
const filteredTraceSpans = filterForDisplay(traceSpans) // For resume executions, rebuild trace spans from the aggregated logs
const mergedTraceSpans = isResume
? traceSpans && traceSpans.length > 0
? traceSpans
: existingLog?.executionData?.traceSpans || []
: traceSpans
const filteredTraceSpans = filterForDisplay(mergedTraceSpans)
const filteredFinalOutput = filterForDisplay(finalOutput) const filteredFinalOutput = filterForDisplay(finalOutput)
const redactedTraceSpans = redactApiKeys(filteredTraceSpans) const redactedTraceSpans = redactApiKeys(filteredTraceSpans)
const redactedFinalOutput = redactApiKeys(filteredFinalOutput) const redactedFinalOutput = redactApiKeys(filteredFinalOutput)
const [updatedLog] = await db // Merge costs if resuming
.update(workflowExecutionLogs) const existingCost = isResume && existingLog?.cost ? existingLog.cost : null
.set({ const mergedCost = existingCost
level, ? {
endedAt: new Date(endedAt), // For resume, add only the model costs, NOT the base execution charge again
totalDurationMs, total: (existingCost.total || 0) + costSummary.modelCost,
files: executionFiles.length > 0 ? executionFiles : null, input: (existingCost.input || 0) + costSummary.totalInputCost,
executionData: { output: (existingCost.output || 0) + costSummary.totalOutputCost,
traceSpans: redactedTraceSpans, tokens: {
finalOutput: redactedFinalOutput, prompt: (existingCost.tokens?.prompt || 0) + costSummary.totalPromptTokens,
tokenBreakdown: { completion: (existingCost.tokens?.completion || 0) + costSummary.totalCompletionTokens,
prompt: costSummary.totalPromptTokens, total: (existingCost.tokens?.total || 0) + costSummary.totalTokens,
completion: costSummary.totalCompletionTokens,
total: costSummary.totalTokens,
}, },
models: costSummary.models, models: this.mergeCostModels(existingCost.models || {}, costSummary.models),
}, }
cost: { : {
total: costSummary.totalCost, total: costSummary.totalCost,
input: costSummary.totalInputCost, input: costSummary.totalInputCost,
output: costSummary.totalOutputCost, output: costSummary.totalOutputCost,
@@ -223,7 +302,36 @@ export class ExecutionLogger implements IExecutionLoggerService {
total: costSummary.totalTokens, total: costSummary.totalTokens,
}, },
models: costSummary.models, models: costSummary.models,
}
// Merge files if resuming
const existingFiles = isResume && existingLog?.files ? existingLog.files : []
const mergedFiles = [...existingFiles, ...executionFiles]
// Calculate the actual total duration for resume executions
const actualTotalDuration =
isResume && existingLog?.startedAt
? new Date(endedAt).getTime() - new Date(existingLog.startedAt).getTime()
: totalDurationMs
const [updatedLog] = await db
.update(workflowExecutionLogs)
.set({
level,
endedAt: new Date(endedAt),
totalDurationMs: actualTotalDuration,
files: mergedFiles.length > 0 ? mergedFiles : null,
executionData: {
traceSpans: redactedTraceSpans,
finalOutput: redactedFinalOutput,
tokenBreakdown: {
prompt: mergedCost.tokens.prompt,
completion: mergedCost.tokens.completion,
total: mergedCost.tokens.total,
}, },
models: mergedCost.models,
},
cost: mergedCost,
}) })
.where(eq(workflowExecutionLogs.executionId, executionId)) .where(eq(workflowExecutionLogs.executionId, executionId))
.returning() .returning()

View File

@@ -21,6 +21,7 @@ export interface SessionStartParams {
workspaceId?: string workspaceId?: string
variables?: Record<string, string> variables?: Record<string, string>
triggerData?: Record<string, unknown> triggerData?: Record<string, unknown>
skipLogCreation?: boolean // For resume executions - reuse existing log entry
} }
export interface SessionCompleteParams { export interface SessionCompleteParams {
@@ -49,6 +50,7 @@ export class LoggingSession {
private trigger?: ExecutionTrigger private trigger?: ExecutionTrigger
private environment?: ExecutionEnvironment private environment?: ExecutionEnvironment
private workflowState?: WorkflowState private workflowState?: WorkflowState
private isResume = false // Track if this is a resume execution
constructor( constructor(
workflowId: string, workflowId: string,
@@ -63,7 +65,7 @@ export class LoggingSession {
} }
async start(params: SessionStartParams = {}): Promise<void> { async start(params: SessionStartParams = {}): Promise<void> {
const { userId, workspaceId, variables, triggerData } = params const { userId, workspaceId, variables, triggerData, skipLogCreation } = params
try { try {
this.trigger = createTriggerObject(this.triggerType, triggerData) this.trigger = createTriggerObject(this.triggerType, triggerData)
@@ -76,6 +78,8 @@ export class LoggingSession {
) )
this.workflowState = await loadWorkflowStateForExecution(this.workflowId) this.workflowState = await loadWorkflowStateForExecution(this.workflowId)
// Only create a new log entry if not resuming
if (!skipLogCreation) {
await executionLogger.startWorkflowExecution({ await executionLogger.startWorkflowExecution({
workflowId: this.workflowId, workflowId: this.workflowId,
executionId: this.executionId, executionId: this.executionId,
@@ -87,6 +91,14 @@ export class LoggingSession {
if (this.requestId) { if (this.requestId) {
logger.debug(`[${this.requestId}] Started logging for execution ${this.executionId}`) logger.debug(`[${this.requestId}] Started logging for execution ${this.executionId}`)
} }
} else {
this.isResume = true // Mark as resume
if (this.requestId) {
logger.debug(
`[${this.requestId}] Resuming logging for existing execution ${this.executionId}`
)
}
}
} catch (error) { } catch (error) {
if (this.requestId) { if (this.requestId) {
logger.error(`[${this.requestId}] Failed to start logging:`, error) logger.error(`[${this.requestId}] Failed to start logging:`, error)
@@ -122,6 +134,7 @@ export class LoggingSession {
finalOutput: finalOutput || {}, finalOutput: finalOutput || {},
traceSpans: traceSpans || [], traceSpans: traceSpans || [],
workflowInput, workflowInput,
isResume: this.isResume,
}) })
// Track workflow execution outcome // Track workflow execution outcome

View File

@@ -167,6 +167,29 @@ export function getBlockOutputs(
return getUnifiedStartOutputs(subBlocks) return getUnifiedStartOutputs(subBlocks)
} }
if (blockType === 'approval') {
// Start with only uiUrl (apiUrl commented out - not accessible as output)
const pauseResumeOutputs: Record<string, any> = {
uiUrl: { type: 'string', description: 'Resume UI URL' },
// apiUrl: { type: 'string', description: 'Resume API URL' }, // Commented out - not accessible as output
}
const normalizedInputFormat = normalizeInputFormatValue(subBlocks?.inputFormat?.value)
// Add each input format field as a top-level output
for (const field of normalizedInputFormat) {
const fieldName = field?.name?.trim()
if (!fieldName) continue
pauseResumeOutputs[fieldName] = {
type: (field?.type || 'any') as any,
description: `Field from input format`,
}
}
return pauseResumeOutputs
}
if (startPath === StartBlockPath.LEGACY_STARTER) { if (startPath === StartBlockPath.LEGACY_STARTER) {
return getLegacyStarterOutputs(subBlocks) return getLegacyStarterOutputs(subBlocks)
} }

View File

@@ -31,6 +31,7 @@ export interface ExecuteWorkflowCoreOptions {
snapshot: ExecutionSnapshot snapshot: ExecutionSnapshot
callbacks: ExecutionCallbacks callbacks: ExecutionCallbacks
loggingSession: LoggingSession loggingSession: LoggingSession
skipLogCreation?: boolean // For resume executions - reuse existing log entry
} }
function parseVariableValueByType(value: any, type: string): any { function parseVariableValueByType(value: any, type: string): any {
@@ -97,7 +98,7 @@ function parseVariableValueByType(value: any, type: string): any {
export async function executeWorkflowCore( export async function executeWorkflowCore(
options: ExecuteWorkflowCoreOptions options: ExecuteWorkflowCoreOptions
): Promise<ExecutionResult> { ): Promise<ExecutionResult> {
const { snapshot, callbacks, loggingSession } = options const { snapshot, callbacks, loggingSession, skipLogCreation } = options
const { metadata, workflow, input, environmentVariables, workflowVariables, selectedOutputs } = const { metadata, workflow, input, environmentVariables, workflowVariables, selectedOutputs } =
snapshot snapshot
const { requestId, workflowId, userId, triggerType, executionId, triggerBlockId, useDraftState } = const { requestId, workflowId, userId, triggerType, executionId, triggerBlockId, useDraftState } =
@@ -153,6 +154,7 @@ export async function executeWorkflowCore(
userId, userId,
workspaceId: providedWorkspaceId, workspaceId: providedWorkspaceId,
variables, variables,
skipLogCreation, // Skip if resuming an existing execution
}) })
// Process block states with env var substitution // Process block states with env var substitution
@@ -225,8 +227,19 @@ export async function executeWorkflowCore(
const filteredEdges = filterEdgesFromTriggerBlocks(mergedStates, edges) const filteredEdges = filterEdgesFromTriggerBlocks(mergedStates, edges)
// Check if this is a resume execution before trigger resolution
const resumeFromSnapshot = (metadata as any).resumeFromSnapshot === true
const resumePendingQueue = snapshot.state?.pendingQueue
let resolvedTriggerBlockId = triggerBlockId let resolvedTriggerBlockId = triggerBlockId
if (!triggerBlockId) {
// For resume executions, skip trigger resolution since we have a pending queue
if (resumeFromSnapshot && resumePendingQueue?.length) {
resolvedTriggerBlockId = undefined
logger.info(`[${requestId}] Skipping trigger resolution for resume execution`, {
pendingQueueLength: resumePendingQueue.length,
})
} else if (!triggerBlockId) {
const executionKind = const executionKind =
triggerType === 'api' || triggerType === 'chat' ? (triggerType as 'api' | 'chat') : 'manual' triggerType === 'api' || triggerType === 'chat' ? (triggerType as 'api' | 'chat') : 'manual'
@@ -263,6 +276,18 @@ export async function executeWorkflowCore(
processedInput = input || {} processedInput = input || {}
// Create and execute workflow with callbacks // Create and execute workflow with callbacks
if (resumeFromSnapshot) {
logger.info(`[${requestId}] Resume execution detected`, {
resumePendingQueue,
hasState: !!snapshot.state,
stateBlockStatesCount: snapshot.state
? Object.keys(snapshot.state.blockStates || {}).length
: 0,
executedBlocksCount: snapshot.state?.executedBlocks?.length ?? 0,
useDraftState,
})
}
const contextExtensions: any = { const contextExtensions: any = {
stream: !!onStream, stream: !!onStream,
selectedOutputs, selectedOutputs,
@@ -273,6 +298,11 @@ export async function executeWorkflowCore(
onBlockStart, onBlockStart,
onBlockComplete, onBlockComplete,
onStream, onStream,
resumeFromSnapshot,
resumePendingQueue,
remainingEdges: snapshot.state?.remainingEdges,
dagIncomingEdges: snapshot.state?.dagIncomingEdges,
snapshotState: snapshot.state,
} }
const executorInstance = new Executor({ const executorInstance = new Executor({
@@ -305,11 +335,11 @@ export async function executeWorkflowCore(
resolvedTriggerBlockId resolvedTriggerBlockId
)) as ExecutionResult )) as ExecutionResult
// Build trace spans for logging // Build trace spans for logging from the full execution result
const { traceSpans, totalDuration } = buildTraceSpans(result) const { traceSpans, totalDuration } = buildTraceSpans(result)
// Update workflow run counts // Update workflow run counts
if (result.success) { if (result.success && result.status !== 'paused') {
await updateWorkflowRunCounts(workflowId) await updateWorkflowRunCounts(workflowId)
} }

File diff suppressed because it is too large Load Diff

View File

@@ -118,6 +118,7 @@ export interface WorkflowLog {
bucketName?: string bucketName?: string
}> }>
cost?: CostMetadata cost?: CostMetadata
hasPendingPause?: boolean
executionData?: ToolCallMetadata & { executionData?: ToolCallMetadata & {
traceSpans?: TraceSpan[] traceSpans?: TraceSpan[]
totalDuration?: number totalDuration?: number

View File

@@ -4,7 +4,6 @@ import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger' import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs' import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { getBlock } from '@/blocks' import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { import {
@@ -120,9 +119,7 @@ export const useWorkflowStore = create<WorkflowStore>()(
// Get outputs based on trigger mode // Get outputs based on trigger mode
const triggerMode = blockProperties?.triggerMode ?? false const triggerMode = blockProperties?.triggerMode ?? false
const outputs = triggerMode const outputs = getBlockOutputs(type, subBlocks, triggerMode)
? getBlockOutputs(type, subBlocks, triggerMode)
: resolveOutputType(blockConfig.outputs)
const newState = { const newState = {
blocks: { blocks: {

View File

@@ -27,7 +27,9 @@
"@sim/db": ["../../packages/db"], "@sim/db": ["../../packages/db"],
"@sim/db/*": ["../../packages/db/*"], "@sim/db/*": ["../../packages/db/*"],
"@/executor": ["./executor"], "@/executor": ["./executor"],
"@/executor/*": ["./executor/*"] "@/executor/*": ["./executor/*"],
"@/executor/pause-resume": ["./executor/pause-resume"],
"@/executor/pause-resume/*": ["./executor/pause-resume/*"]
}, },
"allowJs": true, "allowJs": true,
"noEmit": true, "noEmit": true,

View File

@@ -0,0 +1,37 @@
-- Migration 0106: Human-in-the-Loop (HITL) pause/resume support.
-- Adds storage for paused workflow executions plus a queue of resume requests.
-- NOTE(review): generated by drizzle-kit; the '--> statement-breakpoint'
-- markers delimit statements and must not be altered.

-- One row per paused execution: holds the serialized executor snapshot and
-- the pause points needed to resume the run later.
CREATE TABLE "paused_executions" (
	"id" text PRIMARY KEY NOT NULL,
	"workflow_id" text NOT NULL,
	"execution_id" text NOT NULL,
	"execution_snapshot" jsonb NOT NULL,
	"pause_points" jsonb NOT NULL,
	"total_pause_count" integer NOT NULL,
	"resumed_count" integer DEFAULT 0 NOT NULL,
	"status" text DEFAULT 'paused' NOT NULL,
	"metadata" jsonb DEFAULT '{}'::jsonb NOT NULL,
	"paused_at" timestamp DEFAULT now() NOT NULL,
	"updated_at" timestamp DEFAULT now() NOT NULL,
	"expires_at" timestamp
);
--> statement-breakpoint
-- Queue of resume requests against paused executions; each entry carries the
-- resume input and tracks its lifecycle (pending -> claimed -> completed/failed).
CREATE TABLE "resume_queue" (
	"id" text PRIMARY KEY NOT NULL,
	"paused_execution_id" text NOT NULL,
	"parent_execution_id" text NOT NULL,
	"new_execution_id" text NOT NULL,
	"context_id" text NOT NULL,
	"resume_input" jsonb,
	"status" text DEFAULT 'pending' NOT NULL,
	"queued_at" timestamp,
	"claimed_at" timestamp,
	"completed_at" timestamp,
	"failure_reason" text
);
--> statement-breakpoint
-- Unrelated change bundled in this migration: custom_tools.workspace_id becomes nullable.
ALTER TABLE "custom_tools" ALTER COLUMN "workspace_id" DROP NOT NULL;--> statement-breakpoint
-- Paused state is deleted together with its workflow.
ALTER TABLE "paused_executions" ADD CONSTRAINT "paused_executions_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- Resume entries are deleted together with their paused execution.
ALTER TABLE "resume_queue" ADD CONSTRAINT "resume_queue_paused_execution_id_paused_executions_id_fk" FOREIGN KEY ("paused_execution_id") REFERENCES "public"."paused_executions"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "paused_executions_workflow_id_idx" ON "paused_executions" USING btree ("workflow_id");--> statement-breakpoint
CREATE INDEX "paused_executions_status_idx" ON "paused_executions" USING btree ("status");--> statement-breakpoint
-- At most one paused-execution row per execution id.
CREATE UNIQUE INDEX "paused_executions_execution_id_unique" ON "paused_executions" USING btree ("execution_id");--> statement-breakpoint
-- Supports claiming pending resume entries for a parent execution in FIFO order.
CREATE INDEX "resume_queue_parent_status_idx" ON "resume_queue" USING btree ("parent_execution_id","status","queued_at");--> statement-breakpoint
CREATE INDEX "resume_queue_new_execution_idx" ON "resume_queue" USING btree ("new_execution_id");

File diff suppressed because it is too large Load Diff

View File

@@ -736,6 +736,13 @@
"when": 1761860659858, "when": 1761860659858,
"tag": "0105_glamorous_wrecking_crew", "tag": "0105_glamorous_wrecking_crew",
"breakpoints": true "breakpoints": true
},
{
"idx": 106,
"version": "7",
"when": 1762371130884,
"tag": "0106_bitter_captain_midlands",
"breakpoints": true
} }
] ]
} }

View File

@@ -317,6 +317,58 @@ export const workflowExecutionLogs = pgTable(
}) })
) )
/**
 * Workflow executions paused by a Human-in-the-Loop block.
 *
 * One row per paused execution (enforced by the unique index on
 * `execution_id`). Stores the serialized executor snapshot and the pause
 * points required to resume the run. Rows are cascade-deleted with the
 * owning workflow.
 */
export const pausedExecutions = pgTable(
  'paused_executions',
  {
    id: text('id').primaryKey(),
    // Owning workflow; paused state is removed when the workflow is deleted.
    workflowId: text('workflow_id')
      .notNull()
      .references(() => workflow.id, { onDelete: 'cascade' }),
    executionId: text('execution_id').notNull(),
    // Full serialized execution state used to restore the executor on resume.
    executionSnapshot: jsonb('execution_snapshot').notNull(),
    // Where in the workflow the execution paused (e.g. HITL blocks awaiting input).
    pausePoints: jsonb('pause_points').notNull(),
    totalPauseCount: integer('total_pause_count').notNull(),
    // How many pause points have been resumed so far; starts at 0.
    resumedCount: integer('resumed_count').notNull().default(0),
    // Lifecycle status; defaults to 'paused'.
    status: text('status').notNull().default('paused'),
    metadata: jsonb('metadata').notNull().default(sql`'{}'::jsonb`),
    pausedAt: timestamp('paused_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
    // Optional expiry; NULL means the paused execution never expires.
    expiresAt: timestamp('expires_at'),
  },
  (table) => ({
    workflowIdx: index('paused_executions_workflow_id_idx').on(table.workflowId),
    statusIdx: index('paused_executions_status_idx').on(table.status),
    // Guarantees a single paused row per execution id.
    executionUnique: uniqueIndex('paused_executions_execution_id_unique').on(table.executionId),
  })
)
/**
 * Queue of resume requests targeting paused executions.
 *
 * Each entry carries the user-provided resume input for one pause point and
 * tracks its processing lifecycle via `status` plus the claimed/completed
 * timestamps. Entries are cascade-deleted with their paused execution.
 */
export const resumeQueue = pgTable(
  'resume_queue',
  {
    id: text('id').primaryKey(),
    // Paused execution this resume request belongs to (cascade on delete).
    pausedExecutionId: text('paused_execution_id')
      .notNull()
      .references(() => pausedExecutions.id, { onDelete: 'cascade' }),
    parentExecutionId: text('parent_execution_id').notNull(),
    // Execution id assigned to the resumed run.
    newExecutionId: text('new_execution_id').notNull(),
    // Identifies which pause point/context within the parent is being resumed.
    contextId: text('context_id').notNull(),
    // Optional payload supplied by the approver/resumer.
    resumeInput: jsonb('resume_input'),
    // Lifecycle status; defaults to 'pending'.
    status: text('status').notNull().default('pending'),
    queuedAt: timestamp('queued_at').notNull().defaultNow(),
    // Set when a worker claims the entry for processing.
    claimedAt: timestamp('claimed_at'),
    completedAt: timestamp('completed_at'),
    // Populated when processing fails.
    failureReason: text('failure_reason'),
  },
  (table) => ({
    // Supports fetching pending entries for a parent execution in queued order.
    parentStatusIdx: index('resume_queue_parent_status_idx').on(
      table.parentExecutionId,
      table.status,
      table.queuedAt
    ),
    newExecutionIdx: index('resume_queue_new_execution_idx').on(table.newExecutionId),
  })
)
export const environment = pgTable('environment', { export const environment = pgTable('environment', {
id: text('id').primaryKey(), // Use the user id as the key id: text('id').primaryKey(), // Use the user id as the key
userId: text('user_id') userId: text('user_id')