Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-12 15:34:58 -05:00)

Compare commits: fix/confl...fix/custom (5 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 09a0f5af05 |  |
|  | 81dfeb0bb0 |  |
|  | 01577a18b4 |  |
|  | 52aff4d60b |  |
|  | 3a3bddd6f8 |  |
@@ -29,7 +29,7 @@ const patchBodySchema = z
     description: z
       .string()
       .trim()
-      .max(500, 'Description must be 500 characters or less')
+      .max(2000, 'Description must be 2000 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version
@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
+import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
    let isStreamClosed = false

+   const eventWriter = createExecutionEventWriter(executionId)
+   setExecutionMeta(executionId, {
+     status: 'active',
+     userId: actorUserId,
+     workflowId,
+   }).catch(() => {})

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
-       const sendEvent = (event: ExecutionEvent) => {
-         if (isStreamClosed) return
+       let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null

-         try {
-           controller.enqueue(encodeSSEEvent(event))
-         } catch {
-           isStreamClosed = true
+       const sendEvent = (event: ExecutionEvent) => {
+         if (!isStreamClosed) {
+           try {
+             controller.enqueue(encodeSSEEvent(event))
+           } catch {
+             isStreamClosed = true
+           }
+         }
+         if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
+           eventWriter.write(event).catch(() => {})
+         }
+       }

@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

        const reader = streamingExec.stream.getReader()
        const decoder = new TextDecoder()
-       let chunkCount = 0

        try {
          while (true) {
            const { done, value } = await reader.read()
            if (done) break

-           chunkCount++
            const chunk = decoder.decode(value, { stream: true })
            sendEvent({
              type: 'stream:chunk',

@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
                duration: result.metadata?.duration || 0,
              },
            })
+           finalMetaStatus = 'error'
          } else {
            logger.info(`[${requestId}] Workflow execution was cancelled`)

@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
                duration: result.metadata?.duration || 0,
              },
            })
+           finalMetaStatus = 'cancelled'
          }
          return
        }

@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            endTime: result.metadata?.endTime || new Date().toISOString(),
          },
        })
+       finalMetaStatus = 'complete'
      } catch (error: unknown) {
        const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
        const errorMessage = isTimeout

@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            duration: executionResult?.metadata?.duration || 0,
          },
        })
+       finalMetaStatus = 'error'
      } finally {
+       try {
+         await eventWriter.close()
+       } catch (closeError) {
+         logger.warn(`[${requestId}] Failed to close event writer`, {
+           error: closeError instanceof Error ? closeError.message : String(closeError),
+         })
+       }
+       if (finalMetaStatus) {
+         setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
+       }
        timeoutController.cleanup()
        if (executionId) {
          await cleanupExecutionBase64Cache(executionId)

@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      },
      cancel() {
        isStreamClosed = true
-       timeoutController.cleanup()
+       logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
+       timeoutController.abort()
+       markExecutionCancelled(executionId).catch(() => {})
-       logger.info(`[${requestId}] Client disconnected from SSE stream`)
      },
    })
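
For context, the route above interleaves two sinks per event: the live SSE response and the Redis-backed event buffer. The SSE framing itself is not shown in this diff; a minimal sketch of what `encodeSSEEvent` presumably does, assuming it serializes each event as a standard `data:` frame (the repo's real implementation may attach ids or event names):

```ts
// Hypothetical sketch of SSE framing, for illustration only.
// Assumes ExecutionEvent is JSON-serializable.
const sseEncoder = new TextEncoder()

function encodeSSEEventSketch(event: { type: string }): Uint8Array {
  // Standard SSE wire format: a "data:" line terminated by a blank line.
  return sseEncoder.encode(`data: ${JSON.stringify(event)}\n\n`)
}
```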
@@ -0,0 +1,170 @@ (new file)
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
  type ExecutionStreamStatus,
  getExecutionMeta,
  readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'

const logger = createLogger('ExecutionStreamReconnectAPI')

const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes

function isTerminalStatus(status: ExecutionStreamStatus): boolean {
  return status === 'complete' || status === 'error' || status === 'cancelled'
}

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function GET(
  req: NextRequest,
  { params }: { params: Promise<{ id: string; executionId: string }> }
) {
  const { id: workflowId, executionId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId: auth.userId,
      action: 'read',
    })
    if (!workflowAuthorization.allowed) {
      return NextResponse.json(
        { error: workflowAuthorization.message || 'Access denied' },
        { status: workflowAuthorization.status }
      )
    }

    const meta = await getExecutionMeta(executionId)
    if (!meta) {
      return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
    }

    if (meta.workflowId && meta.workflowId !== workflowId) {
      return NextResponse.json(
        { error: 'Execution does not belong to this workflow' },
        { status: 403 }
      )
    }

    const fromParam = req.nextUrl.searchParams.get('from')
    const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
    const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0

    logger.info('Reconnection stream requested', {
      workflowId,
      executionId,
      fromEventId,
      metaStatus: meta.status,
    })

    const encoder = new TextEncoder()

    let closed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        let lastEventId = fromEventId
        const pollDeadline = Date.now() + MAX_POLL_DURATION_MS

        const enqueue = (text: string) => {
          if (closed) return
          try {
            controller.enqueue(encoder.encode(text))
          } catch {
            closed = true
          }
        }

        try {
          const events = await readExecutionEvents(executionId, lastEventId)
          for (const entry of events) {
            if (closed) return
            enqueue(formatSSEEvent(entry.event))
            lastEventId = entry.eventId
          }

          const currentMeta = await getExecutionMeta(executionId)
          if (!currentMeta || isTerminalStatus(currentMeta.status)) {
            enqueue('data: [DONE]\n\n')
            if (!closed) controller.close()
            return
          }

          while (!closed && Date.now() < pollDeadline) {
            await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
            if (closed) return

            const newEvents = await readExecutionEvents(executionId, lastEventId)
            for (const entry of newEvents) {
              if (closed) return
              enqueue(formatSSEEvent(entry.event))
              lastEventId = entry.eventId
            }

            const polledMeta = await getExecutionMeta(executionId)
            if (!polledMeta || isTerminalStatus(polledMeta.status)) {
              const finalEvents = await readExecutionEvents(executionId, lastEventId)
              for (const entry of finalEvents) {
                if (closed) return
                enqueue(formatSSEEvent(entry.event))
                lastEventId = entry.eventId
              }
              enqueue('data: [DONE]\n\n')
              if (!closed) controller.close()
              return
            }
          }

          if (!closed) {
            logger.warn('Reconnection stream poll deadline reached', { executionId })
            enqueue('data: [DONE]\n\n')
            controller.close()
          }
        } catch (error) {
          logger.error('Error in reconnection stream', {
            executionId,
            error: error instanceof Error ? error.message : String(error),
          })
          if (!closed) {
            try {
              controller.close()
            } catch {}
          }
        }
      },
      cancel() {
        closed = true
        logger.info('Client disconnected from reconnection stream', { executionId })
      },
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-Execution-Id': executionId,
      },
    })
  } catch (error: any) {
    logger.error('Failed to start reconnection stream', {
      workflowId,
      executionId,
      error: error.message,
    })
    return NextResponse.json(
      { error: error.message || 'Failed to start reconnection stream' },
      { status: 500 }
    )
  }
}
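
A sketch of how a client might consume the reconnection endpoint above, assuming only what the route guarantees (SSE `data:` frames, a literal `[DONE]` sentinel, and the `from` query parameter); the helper name and logging are hypothetical:

```ts
// Hypothetical client for the reconnection endpoint above.
async function replayExecution(workflowId: string, executionId: string, fromEventId = 0) {
  const res = await fetch(
    `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`
  )
  if (!res.ok || !res.body) throw new Error(`Reconnect failed (${res.status})`)

  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    // SSE frames are separated by a blank line.
    const frames = buffer.split('\n\n')
    buffer = frames.pop() ?? ''
    for (const frame of frames) {
      const data = frame.replace(/^data: /, '')
      if (data === '[DONE]') return
      console.log('replayed event', JSON.parse(data))
    }
  }
}
```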
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
          className='min-h-[120px] resize-none'
          value={description}
          onChange={(e) => setDescription(e.target.value)}
-         maxLength={500}
+         maxLength={2000}
          disabled={isGenerating}
        />
        <div className='flex items-center justify-between'>

@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
          </p>
        )}
        {!updateMutation.error && !generateMutation.error && <div />}
-       <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
+       <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
      </div>
    </ModalBody>
    <ModalFooter>
@@ -57,6 +57,21 @@ export function useChangeDetection({
        }
      }

+     if (block.triggerMode) {
+       const triggerConfigValue = blockSubValues?.triggerConfig
+       if (
+         triggerConfigValue &&
+         typeof triggerConfigValue === 'object' &&
+         !subBlocks.triggerConfig
+       ) {
+         subBlocks.triggerConfig = {
+           id: 'triggerConfig',
+           type: 'short-input',
+           value: triggerConfigValue,
+         }
+       }
+     }

      blocksWithSubBlocks[blockId] = {
        ...block,
        subBlocks,
@@ -1,4 +1,4 @@
-import { useCallback, useRef, useState } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('useWorkflowExecution')

-// Debug state validation result
+/**
+ * Module-level Set tracking which workflows have an active reconnection effect.
+ * Prevents multiple hook instances (from different components) from starting
+ * concurrent reconnection streams for the same workflow during the same mount cycle.
+ */
+const activeReconnections = new Set<string>()
+
interface DebugValidationResult {
  isValid: boolean
  error?: string
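
The module-level Set added above is a common guard against duplicate effects across hook instances; a distilled sketch of the pattern, illustrative only and not the repo's exact code:

```ts
// Illustrative pattern: only the first hook instance per key runs the effect.
const activeKeys = new Set<string>()

function runOncePerKey(key: string, effect: () => () => void): (() => void) | undefined {
  if (activeKeys.has(key)) return undefined // another instance already owns it
  activeKeys.add(key)
  const cleanup = effect()
  return () => {
    cleanup()
    activeKeys.delete(key) // release ownership so a later mount can re-run
  }
}
```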
@@ -54,7 +60,7 @@ interface DebugValidationResult {

interface BlockEventHandlerConfig {
  workflowId?: string
- executionId?: string
+ executionIdRef: { current: string }
  workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
  activeBlocksSet: Set<string>
  accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
  const queryClient = useQueryClient()
  const currentWorkflow = useCurrentWorkflow()
  const { activeWorkflowId, workflows } = useWorkflowRegistry()
- const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
+ const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
    useTerminalConsoleStore()
+ const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
  const { getAllVariables } = useEnvironmentStore()
  const { getVariablesByWorkflowId, variables } = useVariablesStore()
  const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
    useCurrentWorkflowExecution()
+ const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
+ const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
  const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
  const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
  const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
    (config: BlockEventHandlerConfig) => {
      const {
        workflowId,
-       executionId,
+       executionIdRef,
        workflowEdges,
        activeBlocksSet,
        accumulatedBlockLogs,

@@ -308,6 +317,14 @@ export function useWorkflowExecution() {
        onBlockCompleteCallback,
      } = config

+     /** Returns true if this execution was cancelled or superseded by another run. */
+     const isStaleExecution = () =>
+       !!(
+         workflowId &&
+         executionIdRef.current &&
+         useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
+       )
+
      const updateActiveBlocks = (blockId: string, isActive: boolean) => {
        if (!workflowId) return
        if (isActive) {
@@ -360,7 +377,7 @@ export function useWorkflowExecution() {
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
-         executionId,
+         executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,

@@ -383,7 +400,7 @@ export function useWorkflowExecution() {
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
-         executionId,
+         executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,

@@ -410,7 +427,7 @@ export function useWorkflowExecution() {
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
-         executionId
+         executionIdRef.current
        )
      }

@@ -432,11 +449,12 @@ export function useWorkflowExecution() {
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
-         executionId
+         executionIdRef.current
        )
      }

      const onBlockStarted = (data: BlockStartedData) => {
+       if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, true)
        markIncomingEdges(data.blockId)

@@ -453,7 +471,7 @@ export function useWorkflowExecution() {
          endedAt: undefined,
          workflowId,
          blockId: data.blockId,
-         executionId,
+         executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          isRunning: true,

@@ -465,6 +483,7 @@ export function useWorkflowExecution() {
      }

      const onBlockCompleted = (data: BlockCompletedData) => {
+       if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')

@@ -495,6 +514,7 @@ export function useWorkflowExecution() {
      }

      const onBlockError = (data: BlockErrorData) => {
+       if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -902,10 +922,6 @@ export function useWorkflowExecution() {

      // Update block logs with actual stream completion times
      if (result.logs && streamCompletionTimes.size > 0) {
-       const streamCompletionEndTime = new Date(
-         Math.max(...Array.from(streamCompletionTimes.values()))
-       ).toISOString()
-
        result.logs.forEach((log: BlockLog) => {
          if (streamCompletionTimes.has(log.blockId)) {
            const completionTime = streamCompletionTimes.get(log.blockId)!

@@ -987,7 +1003,6 @@ export function useWorkflowExecution() {
        return { success: true, stream }
      }

      // For manual (non-chat) execution
      const manualExecutionId = uuidv4()
      try {
        const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@ export function useWorkflowExecution() {
        if (result.metadata.pendingBlocks) {
          setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
        }
-     } else if (result && 'success' in result) {
-       setExecutionResult(result)
-       // Reset execution state after successful non-debug execution
-       setIsExecuting(activeWorkflowId, false)
-       setIsDebugging(activeWorkflowId, false)
-       setActiveBlocks(activeWorkflowId, new Set())
-
-       if (isChatExecution) {
-         if (!result.metadata) {
-           result.metadata = { duration: 0, startTime: new Date().toISOString() }
-         }
-         ;(result.metadata as any).source = 'chat'
-       }
-
-       // Invalidate subscription queries to update usage
-       setTimeout(() => {
-         queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
-       }, 1000)
      }
      return result
    } catch (error: any) {
      const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
      // Note: Error logs are already persisted server-side via execution-core.ts
      return errorResult
    }
  },
@@ -1275,7 +1271,7 @@ export function useWorkflowExecution() {
      if (activeWorkflowId) {
        logger.info('Using server-side executor')

-       const executionId = uuidv4()
+       const executionIdRef = { current: '' }

        let executionResult: ExecutionResult = {
          success: false,

@@ -1293,7 +1289,7 @@ export function useWorkflowExecution() {
        try {
          const blockHandlers = buildBlockEventHandlers({
            workflowId: activeWorkflowId,
-           executionId,
+           executionIdRef,
            workflowEdges,
            activeBlocksSet,
            accumulatedBlockLogs,

@@ -1326,6 +1322,10 @@ export function useWorkflowExecution() {
              loops: clientWorkflowState.loops,
              parallels: clientWorkflowState.parallels,
            },
+           onExecutionId: (id) => {
+             executionIdRef.current = id
+             setCurrentExecutionId(activeWorkflowId, id)
+           },
            callbacks: {
              onExecutionStarted: (data) => {
                logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@ export function useWorkflowExecution() {
              },

              onExecutionCompleted: (data) => {
+               if (
+                 activeWorkflowId &&
+                 executionIdRef.current &&
+                 useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                   executionIdRef.current
+               )
+                 return
+
+               if (activeWorkflowId) {
+                 setCurrentExecutionId(activeWorkflowId, null)
+               }
+
                executionResult = {
                  success: data.success,
                  output: data.output,
@@ -1425,9 +1437,33 @@ export function useWorkflowExecution() {
                  })
                }
              }

+             const workflowExecState = activeWorkflowId
+               ? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
+               : null
+             if (activeWorkflowId && !workflowExecState?.isDebugging) {
+               setExecutionResult(executionResult)
+               setIsExecuting(activeWorkflowId, false)
+               setActiveBlocks(activeWorkflowId, new Set())
+               setTimeout(() => {
+                 queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
+               }, 1000)
+             }
            },

            onExecutionError: (data) => {
+             if (
+               activeWorkflowId &&
+               executionIdRef.current &&
+               useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                 executionIdRef.current
+             )
+               return
+
+             if (activeWorkflowId) {
+               setCurrentExecutionId(activeWorkflowId, null)
+             }
+
              executionResult = {
                success: false,
                output: {},
@@ -1441,43 +1477,53 @@ export function useWorkflowExecution() {
              const isPreExecutionError = accumulatedBlockLogs.length === 0
              handleExecutionErrorConsole({
                workflowId: activeWorkflowId,
-               executionId,
+               executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
                isPreExecutionError,
              })

              if (activeWorkflowId) {
                setIsExecuting(activeWorkflowId, false)
                setIsDebugging(activeWorkflowId, false)
                setActiveBlocks(activeWorkflowId, new Set())
              }
            },

            onExecutionCancelled: (data) => {
+             if (
+               activeWorkflowId &&
+               executionIdRef.current &&
+               useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                 executionIdRef.current
+             )
+               return
+
+             if (activeWorkflowId) {
+               setCurrentExecutionId(activeWorkflowId, null)
+             }
+
              handleExecutionCancelledConsole({
                workflowId: activeWorkflowId,
-               executionId,
+               executionId: executionIdRef.current,
                durationMs: data?.duration,
              })

              if (activeWorkflowId) {
                setIsExecuting(activeWorkflowId, false)
                setIsDebugging(activeWorkflowId, false)
                setActiveBlocks(activeWorkflowId, new Set())
              }
            },
          },
        })

        return executionResult
      } catch (error: any) {
        // Don't log abort errors - they're intentional user actions
        if (error.name === 'AbortError' || error.message?.includes('aborted')) {
          logger.info('Execution aborted by user')

          // Reset execution state
          if (activeWorkflowId) {
            setIsExecuting(activeWorkflowId, false)
            setActiveBlocks(activeWorkflowId, new Set())
          }

          // Return gracefully without error
-         return {
-           success: false,
-           output: {},
-           metadata: { duration: 0 },
-           logs: [],
-         }
+         return executionResult
        }

        logger.error('Server-side execution failed:', error)

@@ -1485,7 +1531,6 @@ export function useWorkflowExecution() {
      }
    }

    // Fallback: should never reach here
    throw new Error('Server-side execution is required')
  }
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
   * Handles cancelling the current workflow execution
   */
  const handleCancelExecution = useCallback(() => {
+   if (!activeWorkflowId) return
    logger.info('Workflow execution cancellation requested')

-   // Cancel the execution stream for this workflow (server-side)
-   executionStream.cancel(activeWorkflowId ?? undefined)
+   const storedExecutionId = getCurrentExecutionId(activeWorkflowId)

-   // Mark current chat execution as superseded so its cleanup won't affect new executions
-   currentChatExecutionIdRef.current = null
-
-   // Mark all running entries as canceled in the terminal
-   if (activeWorkflowId) {
    cancelRunningEntries(activeWorkflowId)

-     // Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
-     setIsExecuting(activeWorkflowId, false)
-     setIsDebugging(activeWorkflowId, false)
-     setActiveBlocks(activeWorkflowId, new Set())
+   if (storedExecutionId) {
+     setCurrentExecutionId(activeWorkflowId, null)
+     fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
+       method: 'POST',
+     }).catch(() => {})
+     handleExecutionCancelledConsole({
+       workflowId: activeWorkflowId,
+       executionId: storedExecutionId,
+     })
    }

-   // If in debug mode, also reset debug state
+   executionStream.cancel(activeWorkflowId)
+   currentChatExecutionIdRef.current = null
+   setIsExecuting(activeWorkflowId, false)
+   setIsDebugging(activeWorkflowId, false)
+   setActiveBlocks(activeWorkflowId, new Set())

    if (isDebugging) {
      resetDebugState()
    }

@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
    setIsDebugging,
    setActiveBlocks,
    activeWorkflowId,
    cancelRunningEntries,
+   getCurrentExecutionId,
+   setCurrentExecutionId,
+   handleExecutionCancelledConsole,
  ])

  /**
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
      }

      setIsExecuting(workflowId, true)
-     const executionId = uuidv4()
+     const executionIdRef = { current: '' }
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()

@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
      try {
        const blockHandlers = buildBlockEventHandlers({
          workflowId,
-         executionId,
+         executionIdRef,
          workflowEdges,
          activeBlocksSet,
          accumulatedBlockLogs,

@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
          startBlockId: blockId,
          sourceSnapshot: effectiveSnapshot,
          input: workflowInput,
+         onExecutionId: (id) => {
+           executionIdRef.current = id
+           setCurrentExecutionId(workflowId, id)
+         },
          callbacks: {
            onBlockStarted: blockHandlers.onBlockStarted,
            onBlockCompleted: blockHandlers.onBlockCompleted,

@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {

            onExecutionCompleted: (data) => {
              if (data.success) {
                // Add the start block (trigger) to executed blocks
                executedBlockIds.add(blockId)

                const mergedBlockStates: Record<string, BlockState> = {

@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
                }
                setLastExecutionSnapshot(workflowId, updatedSnapshot)
              }

+             setCurrentExecutionId(workflowId, null)
              setIsExecuting(workflowId, false)
              setActiveBlocks(workflowId, new Set())
            },

            onExecutionError: (data) => {

@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {

              handleExecutionErrorConsole({
                workflowId,
-               executionId,
+               executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
              })

+             setCurrentExecutionId(workflowId, null)
              setIsExecuting(workflowId, false)
              setActiveBlocks(workflowId, new Set())
            },

            onExecutionCancelled: (data) => {
              handleExecutionCancelledConsole({
                workflowId,
-               executionId,
+               executionId: executionIdRef.current,
                durationMs: data?.duration,
              })

+             setCurrentExecutionId(workflowId, null)
              setIsExecuting(workflowId, false)
              setActiveBlocks(workflowId, new Set())
            },
          },
        })

@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
        logger.error('Run-from-block failed:', error)
      }
    } finally {
-     setIsExecuting(workflowId, false)
-     setActiveBlocks(workflowId, new Set())
+     const currentId = getCurrentExecutionId(workflowId)
+     if (currentId === null || currentId === executionIdRef.current) {
+       setCurrentExecutionId(workflowId, null)
+       setIsExecuting(workflowId, false)
+       setActiveBlocks(workflowId, new Set())
+     }
    }
  },
  [
    getLastExecutionSnapshot,
    setLastExecutionSnapshot,
    clearLastExecutionSnapshot,
+   getCurrentExecutionId,
+   setCurrentExecutionId,
    setIsExecuting,
    setActiveBlocks,
    setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {

      const executionId = uuidv4()
      try {
-       const result = await executeWorkflow(
-         undefined,
-         undefined,
-         executionId,
-         undefined,
-         'manual',
-         blockId
-       )
-       if (result && 'success' in result) {
-         setExecutionResult(result)
-       }
+       await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
      } catch (error) {
        const errorResult = handleExecutionError(error, { executionId })
        return errorResult
      } finally {
+       setCurrentExecutionId(workflowId, null)
        setIsExecuting(workflowId, false)
        setIsDebugging(workflowId, false)
        setActiveBlocks(workflowId, new Set())
      }
    },
-   [activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
+   [
+     activeWorkflowId,
+     setCurrentExecutionId,
+     setExecutionResult,
+     setIsExecuting,
+     setIsDebugging,
+     setActiveBlocks,
+   ]
  )

+ useEffect(() => {
+   if (!activeWorkflowId || !hasHydrated) return
+
+   const entries = useTerminalConsoleStore.getState().entries
+   const runningEntries = entries.filter(
+     (e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
+   )
+   if (runningEntries.length === 0) return
+
+   if (activeReconnections.has(activeWorkflowId)) return
+   activeReconnections.add(activeWorkflowId)
+
+   executionStream.cancel(activeWorkflowId)
+
+   const sorted = [...runningEntries].sort((a, b) => {
+     const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
+     const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
+     return bTime - aTime
+   })
+   const executionId = sorted[0].executionId!
+
+   const otherExecutionIds = new Set(
+     sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
+   )
+   if (otherExecutionIds.size > 0) {
+     cancelRunningEntries(activeWorkflowId)
+   }
+
+   setCurrentExecutionId(activeWorkflowId, executionId)
+   setIsExecuting(activeWorkflowId, true)
+
+   const workflowEdges = useWorkflowStore.getState().edges
+   const activeBlocksSet = new Set<string>()
+   const accumulatedBlockLogs: BlockLog[] = []
+   const accumulatedBlockStates = new Map<string, BlockState>()
+   const executedBlockIds = new Set<string>()
+
+   const executionIdRef = { current: executionId }
+
+   const handlers = buildBlockEventHandlers({
+     workflowId: activeWorkflowId,
+     executionIdRef,
+     workflowEdges,
+     activeBlocksSet,
+     accumulatedBlockLogs,
+     accumulatedBlockStates,
+     executedBlockIds,
+     consoleMode: 'update',
+     includeStartConsoleEntry: true,
+   })
+
+   const originalEntries = entries
+     .filter((e) => e.executionId === executionId)
+     .map((e) => ({ ...e }))
+
+   let cleared = false
+   let reconnectionComplete = false
+   let cleanupRan = false
+   const clearOnce = () => {
+     if (!cleared) {
+       cleared = true
+       clearExecutionEntries(executionId)
+     }
+   }
+
+   const reconnectWorkflowId = activeWorkflowId
+
+   executionStream
+     .reconnect({
+       workflowId: reconnectWorkflowId,
+       executionId,
+       callbacks: {
+         onBlockStarted: (data) => {
+           clearOnce()
+           handlers.onBlockStarted(data)
+         },
+         onBlockCompleted: (data) => {
+           clearOnce()
+           handlers.onBlockCompleted(data)
+         },
+         onBlockError: (data) => {
+           clearOnce()
+           handlers.onBlockError(data)
+         },
+         onExecutionCompleted: () => {
+           const currentId = useExecutionStore
+             .getState()
+             .getCurrentExecutionId(reconnectWorkflowId)
+           if (currentId !== executionId) {
+             reconnectionComplete = true
+             activeReconnections.delete(reconnectWorkflowId)
+             return
+           }
+           clearOnce()
+           reconnectionComplete = true
+           activeReconnections.delete(reconnectWorkflowId)
+           setCurrentExecutionId(reconnectWorkflowId, null)
+           setIsExecuting(reconnectWorkflowId, false)
+           setActiveBlocks(reconnectWorkflowId, new Set())
+         },
+         onExecutionError: (data) => {
+           const currentId = useExecutionStore
+             .getState()
+             .getCurrentExecutionId(reconnectWorkflowId)
+           if (currentId !== executionId) {
+             reconnectionComplete = true
+             activeReconnections.delete(reconnectWorkflowId)
+             return
+           }
+           clearOnce()
+           reconnectionComplete = true
+           activeReconnections.delete(reconnectWorkflowId)
+           setCurrentExecutionId(reconnectWorkflowId, null)
+           setIsExecuting(reconnectWorkflowId, false)
+           setActiveBlocks(reconnectWorkflowId, new Set())
+           handleExecutionErrorConsole({
+             workflowId: reconnectWorkflowId,
+             executionId,
+             error: data.error,
+             blockLogs: accumulatedBlockLogs,
+           })
+         },
+         onExecutionCancelled: () => {
+           const currentId = useExecutionStore
+             .getState()
+             .getCurrentExecutionId(reconnectWorkflowId)
+           if (currentId !== executionId) {
+             reconnectionComplete = true
+             activeReconnections.delete(reconnectWorkflowId)
+             return
+           }
+           clearOnce()
+           reconnectionComplete = true
+           activeReconnections.delete(reconnectWorkflowId)
+           setCurrentExecutionId(reconnectWorkflowId, null)
+           setIsExecuting(reconnectWorkflowId, false)
+           setActiveBlocks(reconnectWorkflowId, new Set())
+           handleExecutionCancelledConsole({
+             workflowId: reconnectWorkflowId,
+             executionId,
+           })
+         },
+       },
+     })
+     .catch((error) => {
+       logger.warn('Execution reconnection failed', { executionId, error })
+     })
+     .finally(() => {
+       if (reconnectionComplete || cleanupRan) return
+       const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
+       if (currentId !== executionId) return
+       reconnectionComplete = true
+       activeReconnections.delete(reconnectWorkflowId)
+       clearExecutionEntries(executionId)
+       for (const entry of originalEntries) {
+         addConsole({
+           workflowId: entry.workflowId,
+           blockId: entry.blockId,
+           blockName: entry.blockName,
+           blockType: entry.blockType,
+           executionId: entry.executionId,
+           executionOrder: entry.executionOrder,
+           isRunning: false,
+           warning: 'Execution result unavailable — check the logs page',
+         })
+       }
+       setCurrentExecutionId(reconnectWorkflowId, null)
+       setIsExecuting(reconnectWorkflowId, false)
+       setActiveBlocks(reconnectWorkflowId, new Set())
+     })
+
+   return () => {
+     cleanupRan = true
+     executionStream.cancel(reconnectWorkflowId)
+     activeReconnections.delete(reconnectWorkflowId)
+
+     if (cleared && !reconnectionComplete) {
+       clearExecutionEntries(executionId)
+       for (const entry of originalEntries) {
+         addConsole(entry)
+       }
+     }
+   }
+   // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [activeWorkflowId, hasHydrated])

  return {
    isExecuting,
    isDebugging,
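
The reconnection effect above follows a snapshot-and-restore pattern for console entries: copy the entries tied to the execution, clear them lazily on the first replayed event, and put the originals back if the replay dies before reaching a terminal state. A distilled sketch of that pattern, with hypothetical names:

```ts
// Illustrative snapshot/clear-once/restore guard, not the repo's exact code.
function makeEntryGuard<T extends object>(
  original: T[],
  clear: () => void,
  restore: (entries: T[]) => void
) {
  const snapshot = original.map((e) => ({ ...e }))
  let cleared = false
  let finished = false
  return {
    // Clear stale entries only once fresh events are actually arriving.
    clearOnce() {
      if (!cleared) {
        cleared = true
        clear()
      }
    },
    markFinished() {
      finished = true
    },
    // On teardown, put the old entries back if the replay never completed.
    teardown() {
      if (cleared && !finished) restore(snapshot)
    },
  }
}
```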
@@ -1901,5 +1901,317 @@ describe('AgentBlockHandler', () => {

    expect(discoveryCalls[0].url).toContain('serverId=mcp-legacy-server')
  })

  describe('customToolId resolution - DB as source of truth', () => {
    const staleInlineSchema = {
      function: {
        name: 'buttonTemplate',
        description: 'Creates a button template',
        parameters: {
          type: 'object',
          properties: {
            sender_id: { type: 'string', description: 'Sender ID' },
            header_value: { type: 'string', description: 'Header text' },
            body_value: { type: 'string', description: 'Body text' },
            button_array: {
              type: 'array',
              items: { type: 'string' },
              description: 'Button labels',
            },
          },
          required: ['sender_id', 'header_value', 'body_value', 'button_array'],
        },
      },
    }

    const dbSchema = {
      function: {
        name: 'buttonTemplate',
        description: 'Creates a button template',
        parameters: {
          type: 'object',
          properties: {
            sender_id: { type: 'string', description: 'Sender ID' },
            header_value: { type: 'string', description: 'Header text' },
            body_value: { type: 'string', description: 'Body text' },
            button_array: {
              type: 'array',
              items: { type: 'string' },
              description: 'Button labels',
            },
            channel: { type: 'string', description: 'Channel name' },
          },
          required: ['sender_id', 'header_value', 'body_value', 'button_array', 'channel'],
        },
      },
    }

    const staleInlineCode =
      'return JSON.stringify({ type: "button", phone: sender_id, header: header_value, body: body_value, buttons: button_array });'
    const dbCode =
      'if (channel === "whatsapp") { return JSON.stringify({ type: "button", phone: sender_id, header: header_value, body: body_value, buttons: button_array }); }'

    function mockFetchForCustomTool(toolId: string) {
      mockFetch.mockImplementation((url: string) => {
        if (typeof url === 'string' && url.includes('/api/tools/custom')) {
          return Promise.resolve({
            ok: true,
            headers: { get: () => null },
            json: () =>
              Promise.resolve({
                data: [
                  {
                    id: toolId,
                    title: 'buttonTemplate',
                    schema: dbSchema,
                    code: dbCode,
                  },
                ],
              }),
          })
        }
        return Promise.resolve({
          ok: true,
          headers: { get: () => null },
          json: () => Promise.resolve({}),
        })
      })
    }

    function mockFetchFailure() {
      mockFetch.mockImplementation((url: string) => {
        if (typeof url === 'string' && url.includes('/api/tools/custom')) {
          return Promise.resolve({
            ok: false,
            status: 500,
            headers: { get: () => null },
            json: () => Promise.resolve({}),
          })
        }
        return Promise.resolve({
          ok: true,
          headers: { get: () => null },
          json: () => Promise.resolve({}),
        })
      })
    }

    beforeEach(() => {
      Object.defineProperty(global, 'window', {
        value: undefined,
        writable: true,
        configurable: true,
      })
    })

    it('should always fetch latest schema from DB when customToolId is present', async () => {
      const toolId = 'custom-tool-123'
      mockFetchForCustomTool(toolId)

      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Send a button template',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            customToolId: toolId,
            title: 'buttonTemplate',
            schema: staleInlineSchema,
            code: staleInlineCode,
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      expect(mockExecuteProviderRequest).toHaveBeenCalled()
      const providerCall = mockExecuteProviderRequest.mock.calls[0]
      const tools = providerCall[1].tools

      expect(tools.length).toBe(1)
      // DB schema wins over stale inline — includes channel param
      expect(tools[0].parameters.required).toContain('channel')
      expect(tools[0].parameters.properties).toHaveProperty('channel')
    })

    it('should fetch from DB when customToolId has no inline schema', async () => {
      const toolId = 'custom-tool-123'
      mockFetchForCustomTool(toolId)

      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Send a button template',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            customToolId: toolId,
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      expect(mockExecuteProviderRequest).toHaveBeenCalled()
      const providerCall = mockExecuteProviderRequest.mock.calls[0]
      const tools = providerCall[1].tools

      expect(tools.length).toBe(1)
      expect(tools[0].name).toBe('buttonTemplate')
      expect(tools[0].parameters.required).toContain('channel')
    })

    it('should fall back to inline schema when DB fetch fails and inline exists', async () => {
      mockFetchFailure()

      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Send a button template',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            customToolId: 'custom-tool-123',
            title: 'buttonTemplate',
            schema: staleInlineSchema,
            code: staleInlineCode,
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      expect(mockExecuteProviderRequest).toHaveBeenCalled()
      const providerCall = mockExecuteProviderRequest.mock.calls[0]
      const tools = providerCall[1].tools

      // Falls back to inline schema (4 params, no channel)
      expect(tools.length).toBe(1)
      expect(tools[0].name).toBe('buttonTemplate')
      expect(tools[0].parameters.required).not.toContain('channel')
    })

    it('should return null when DB fetch fails and no inline schema exists', async () => {
      mockFetchFailure()

      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Send a button template',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            customToolId: 'custom-tool-123',
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      expect(mockExecuteProviderRequest).toHaveBeenCalled()
      const providerCall = mockExecuteProviderRequest.mock.calls[0]
      const tools = providerCall[1].tools

      expect(tools.length).toBe(0)
    })

    it('should use DB code for executeFunction when customToolId resolves', async () => {
      const toolId = 'custom-tool-123'
      mockFetchForCustomTool(toolId)

      let capturedTools: any[] = []
      Promise.all = vi.fn().mockImplementation((promises: Promise<any>[]) => {
        const result = originalPromiseAll.call(Promise, promises)
        result.then((tools: any[]) => {
          if (tools?.length) {
            capturedTools = tools.filter((t) => t !== null)
          }
        })
        return result
      })

      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Send a button template',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            customToolId: toolId,
            title: 'buttonTemplate',
            schema: staleInlineSchema,
            code: staleInlineCode,
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      expect(capturedTools.length).toBe(1)
      expect(typeof capturedTools[0].executeFunction).toBe('function')

      await capturedTools[0].executeFunction({ sender_id: '123', channel: 'whatsapp' })

      // Should use DB code, not stale inline code
      expect(mockExecuteTool).toHaveBeenCalledWith(
        'function_execute',
        expect.objectContaining({
          code: dbCode,
        }),
        false,
        expect.any(Object)
      )
    })

    it('should not fetch from DB when no customToolId is present', async () => {
      const inputs = {
        model: 'gpt-4o',
        userPrompt: 'Use the tool',
        apiKey: 'test-api-key',
        tools: [
          {
            type: 'custom-tool',
            title: 'inlineTool',
            schema: staleInlineSchema,
            code: staleInlineCode,
            usageControl: 'auto' as const,
          },
        ],
      }

      mockGetProviderFromModel.mockReturnValue('openai')

      await handler.execute(mockContext, mockBlock, inputs)

      const customToolFetches = mockFetch.mock.calls.filter(
        (call: any[]) => typeof call[0] === 'string' && call[0].includes('/api/tools/custom')
      )
      expect(customToolFetches.length).toBe(0)

      expect(mockExecuteProviderRequest).toHaveBeenCalled()
      const providerCall = mockExecuteProviderRequest.mock.calls[0]
      const tools = providerCall[1].tools

      expect(tools.length).toBe(1)
      expect(tools[0].name).toBe('buttonTemplate')
      expect(tools[0].parameters.required).not.toContain('channel')
    })
  })
})
})
@@ -272,15 +272,16 @@ export class AgentBlockHandler implements BlockHandler {
    let code = tool.code
    let title = tool.title

-   if (tool.customToolId && !schema) {
+   if (tool.customToolId) {
      const resolved = await this.fetchCustomToolById(ctx, tool.customToolId)
-     if (!resolved) {
+     if (resolved) {
+       schema = resolved.schema
+       code = resolved.code
+       title = resolved.title
+     } else if (!schema) {
        logger.error(`Custom tool not found: ${tool.customToolId}`)
        return null
      }
-     schema = resolved.schema
-     code = resolved.code
-     title = resolved.title
    }

    if (!schema?.function) {
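
In summary, the change above makes the database record authoritative whenever `customToolId` is present, matching the test matrix earlier in this diff. A distilled restatement of the resolution order (illustration only, with hypothetical names):

```ts
// Resolution order implied by the diff above:
//   DB fetch ok            -> use DB schema/code/title (inline copy ignored)
//   DB fetch fails, inline -> fall back to the inline schema/code
//   DB fetch fails, none   -> log and resolve to null (tool dropped)
type Resolved = { schema: unknown; code?: string; title?: string } | null

function pickCustomTool(db: Resolved, inline: Resolved): Resolved {
  if (db) return db
  if (inline) return inline
  return null // caller logs "Custom tool not found"
}
```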
@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

-Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
+Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.

Guidelines:
- Use the specific values provided (credential names, channel names, model names)
@@ -1,4 +1,4 @@
-import { useCallback, useRef } from 'react'
+import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
  BlockCompletedData,

@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

const logger = createLogger('useExecutionStream')

+/**
+ * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
+ * These should be treated as clean disconnects, not execution errors.
+ */
+function isClientDisconnectError(error: any): boolean {
+  if (error.name === 'AbortError') return true
+  const msg = (error.message ?? '').toLowerCase()
+  return (
+    msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
+  )
+}
+
/**
 * Processes SSE events from a response body and invokes appropriate callbacks.
 */
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
    parallels?: Record<string, any>
  }
  stopAfterBlockId?: string
+ onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}
@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
  startBlockId: string
  sourceSnapshot: SerializableExecutionState
  input?: any
+ onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}

+export interface ReconnectStreamOptions {
+  workflowId: string
+  executionId: string
+  fromEventId?: number
+  callbacks?: ExecutionStreamCallbacks
+}
+
+/**
+ * Module-level map shared across all hook instances.
+ * Ensures ANY instance can cancel streams started by ANY other instance,
+ * which is critical for SPA navigation where the original hook instance unmounts
+ * but the SSE stream must be cancellable from the new instance.
+ */
+const sharedAbortControllers = new Map<string, AbortController>()
+
/**
 * Hook for executing workflows via server-side SSE streaming.
 * Supports concurrent executions via per-workflow AbortController maps.
 */
export function useExecutionStream() {
- const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
- const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
-   new Map()
- )

  const execute = useCallback(async (options: ExecuteStreamOptions) => {
-   const { workflowId, callbacks = {}, ...payload } = options
+   const { workflowId, callbacks = {}, onExecutionId, ...payload } = options

-   const existing = abortControllersRef.current.get(workflowId)
+   const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }

    const abortController = new AbortController()
-   abortControllersRef.current.set(workflowId, abortController)
-   currentExecutionsRef.current.delete(workflowId)
+   sharedAbortControllers.set(workflowId, abortController)

    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
        throw new Error('No response body')
      }

-     const executionId = response.headers.get('X-Execution-Id')
-     if (executionId) {
-       currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
+     const serverExecutionId = response.headers.get('X-Execution-Id')
+     if (serverExecutionId) {
+       onExecutionId?.(serverExecutionId)
      }

      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Execution')
    } catch (error: any) {
-     if (error.name === 'AbortError') {
-       logger.info('Execution stream cancelled')
-       callbacks.onExecutionCancelled?.({ duration: 0 })
-     } else {
-       logger.error('Execution stream error:', error)
-       callbacks.onExecutionError?.({
-         error: error.message || 'Unknown error',
-         duration: 0,
-       })
+     if (isClientDisconnectError(error)) {
+       logger.info('Execution stream disconnected (page unload or abort)')
+       return
      }
+     logger.error('Execution stream error:', error)
+     callbacks.onExecutionError?.({
+       error: error.message || 'Unknown error',
+       duration: 0,
+     })
      throw error
    } finally {
-     abortControllersRef.current.delete(workflowId)
-     currentExecutionsRef.current.delete(workflowId)
+     if (sharedAbortControllers.get(workflowId) === abortController) {
+       sharedAbortControllers.delete(workflowId)
+     }
    }
  }, [])

  const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
-   const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
+   const {
+     workflowId,
+     startBlockId,
+     sourceSnapshot,
+     input,
+     onExecutionId,
+     callbacks = {},
+   } = options

-   const existing = abortControllersRef.current.get(workflowId)
+   const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }

    const abortController = new AbortController()
-   abortControllersRef.current.set(workflowId, abortController)
-   currentExecutionsRef.current.delete(workflowId)
+   sharedAbortControllers.set(workflowId, abortController)

    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const executionId = response.headers.get('X-Execution-Id')
|
||||
if (executionId) {
|
||||
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
|
||||
const serverExecutionId = response.headers.get('X-Execution-Id')
|
||||
if (serverExecutionId) {
|
||||
onExecutionId?.(serverExecutionId)
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
await processSSEStream(reader, callbacks, 'Run-from-block')
|
||||
} catch (error: any) {
|
||||
if (error.name === 'AbortError') {
|
||||
logger.info('Run-from-block execution cancelled')
|
||||
callbacks.onExecutionCancelled?.({ duration: 0 })
|
||||
} else {
|
||||
logger.error('Run-from-block execution error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
if (isClientDisconnectError(error)) {
|
||||
logger.info('Run-from-block stream disconnected (page unload or abort)')
|
||||
return
|
||||
}
|
||||
logger.error('Run-from-block execution error:', error)
|
||||
callbacks.onExecutionError?.({
|
||||
error: error.message || 'Unknown error',
|
||||
duration: 0,
|
||||
})
|
||||
throw error
|
||||
} finally {
|
||||
abortControllersRef.current.delete(workflowId)
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
if (sharedAbortControllers.get(workflowId) === abortController) {
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
|
||||
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
|
||||
|
||||
const existing = sharedAbortControllers.get(workflowId)
|
||||
if (existing) {
|
||||
existing.abort()
|
||||
}
|
||||
|
||||
const abortController = new AbortController()
|
||||
sharedAbortControllers.set(workflowId, abortController)
|
||||
try {
|
||||
const response = await fetch(
|
||||
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
|
||||
{ signal: abortController.signal }
|
||||
)
|
||||
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
|
||||
if (!response.body) throw new Error('No response body')
|
||||
|
||||
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
|
||||
} catch (error: any) {
|
||||
if (isClientDisconnectError(error)) return
|
||||
logger.error('Reconnection stream error:', error)
|
||||
throw error
|
||||
} finally {
|
||||
if (sharedAbortControllers.get(workflowId) === abortController) {
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
}
|
||||
}, [])
|
||||
|
||||
const cancel = useCallback((workflowId?: string) => {
|
||||
if (workflowId) {
|
||||
const execution = currentExecutionsRef.current.get(workflowId)
|
||||
if (execution) {
|
||||
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
|
||||
method: 'POST',
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
const controller = abortControllersRef.current.get(workflowId)
|
||||
const controller = sharedAbortControllers.get(workflowId)
|
||||
if (controller) {
|
||||
controller.abort()
|
||||
abortControllersRef.current.delete(workflowId)
|
||||
sharedAbortControllers.delete(workflowId)
|
||||
}
|
||||
currentExecutionsRef.current.delete(workflowId)
|
||||
} else {
|
||||
for (const [, execution] of currentExecutionsRef.current) {
|
||||
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
|
||||
method: 'POST',
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
for (const [, controller] of abortControllersRef.current) {
|
||||
for (const [, controller] of sharedAbortControllers) {
|
||||
controller.abort()
|
||||
}
|
||||
abortControllersRef.current.clear()
|
||||
currentExecutionsRef.current.clear()
|
||||
sharedAbortControllers.clear()
|
||||
}
|
||||
}, [])
|
||||
|
||||
return {
|
||||
execute,
|
||||
executeFromBlock,
|
||||
reconnect,
|
||||
cancel,
|
||||
}
|
||||
}
|
||||
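
A minimal sketch of how the new reconnect path might be consumed after a page reload. It assumes the hook and store are imported from their modules (the import paths and the useResumeExecution name are illustrative, not part of this diff), and that the execution ID was previously captured via the onExecutionId callback and stashed in the execution store.

import { useCallback } from 'react'
import { useExecutionStream } from '@/hooks/use-execution-stream' // illustrative path
import { useExecutionStore } from '@/stores/execution' // illustrative path

export function useResumeExecution(workflowId: string) {
  const { reconnect } = useExecutionStream()

  // Re-attach to a still-running execution after a page reload by
  // replaying buffered events from the beginning (fromEventId = 0).
  return useCallback(async () => {
    const executionId = useExecutionStore.getState().getCurrentExecutionId(workflowId)
    if (!executionId) return
    await reconnect({
      workflowId,
      executionId,
      fromEventId: 0, // ask the server to replay every buffered event
      callbacks: {},
    })
  }, [workflowId, reconnect])
}
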
@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
  fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
  commands?: string[]
  chatId?: string
  conversationId?: string
  prefetch?: boolean
  implicitFeedback?: string
}

@@ -64,6 +66,10 @@ export async function buildCopilotRequestPayload(
    fileAttachments,
    commands,
    chatId,
    conversationId,
    prefetch,
    conversationHistory,
    implicitFeedback,
  } = params

  const selectedModel = options.selectedModel
@@ -154,6 +160,12 @@ export async function buildCopilotRequestPayload(
    version: SIM_AGENT_VERSION,
    ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
    ...(chatId ? { chatId } : {}),
    ...(conversationId ? { conversationId } : {}),
    ...(Array.isArray(conversationHistory) && conversationHistory.length > 0
      ? { conversationHistory }
      : {}),
    ...(typeof prefetch === 'boolean' ? { prefetch } : {}),
    ...(implicitFeedback ? { implicitFeedback } : {}),
    ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
    ...(integrationTools.length > 0 ? { integrationTools } : {}),
    ...(credentials ? { credentials } : {}),
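
The payload assembly above leans on conditional spreads so optional fields are omitted entirely rather than sent as undefined. A standalone sketch of the pattern (types and values are illustrative):

// A field is only present on the payload when its source value is set;
// booleans are gated on typeof so an explicit false still gets through.
interface Payload {
  chatId?: string
  prefetch?: boolean
}

function buildPayload(chatId?: string, prefetch?: boolean): Payload {
  return {
    ...(chatId ? { chatId } : {}),
    ...(typeof prefetch === 'boolean' ? { prefetch } : {}),
  }
}

console.log(buildPayload('chat_1')) // { chatId: 'chat_1' }
console.log(buildPayload(undefined, false)) // { prefetch: false }
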
246	apps/sim/lib/execution/event-buffer.ts	Normal file
@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'

const logger = createLogger('ExecutionEventBuffer')

const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200

function getEventsKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:events`
}

function getSeqKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:seq`
}

function getMetaKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:meta`
}

export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'

export interface ExecutionStreamMeta {
  status: ExecutionStreamStatus
  userId?: string
  workflowId?: string
  updatedAt?: string
}

export interface ExecutionEventEntry {
  eventId: number
  executionId: string
  event: ExecutionEvent
}

export interface ExecutionEventWriter {
  write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
  flush: () => Promise<void>
  close: () => Promise<void>
}

export async function setExecutionMeta(
  executionId: string,
  meta: Partial<ExecutionStreamMeta>
): Promise<void> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
    return
  }
  try {
    const key = getMetaKey(executionId)
    const payload: Record<string, string> = {
      updatedAt: new Date().toISOString(),
    }
    if (meta.status) payload.status = meta.status
    if (meta.userId) payload.userId = meta.userId
    if (meta.workflowId) payload.workflowId = meta.workflowId
    await redis.hset(key, payload)
    await redis.expire(key, TTL_SECONDS)
  } catch (error) {
    logger.warn('Failed to update execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
    return null
  }
  try {
    const key = getMetaKey(executionId)
    const meta = await redis.hgetall(key)
    if (!meta || Object.keys(meta).length === 0) return null
    return meta as unknown as ExecutionStreamMeta
  } catch (error) {
    logger.warn('Failed to read execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

export async function readExecutionEvents(
  executionId: string,
  afterEventId: number
): Promise<ExecutionEventEntry[]> {
  const redis = getRedisClient()
  if (!redis) return []
  try {
    const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
    return raw
      .map((entry) => {
        try {
          return JSON.parse(entry) as ExecutionEventEntry
        } catch {
          return null
        }
      })
      .filter((entry): entry is ExecutionEventEntry => Boolean(entry))
  } catch (error) {
    logger.warn('Failed to read execution events', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return []
  }
}

export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn(
      'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
      {
        executionId,
      }
    )
    return {
      write: async (event) => ({ eventId: 0, executionId, event }),
      flush: async () => {},
      close: async () => {},
    }
  }

  let pending: ExecutionEventEntry[] = []
  let nextEventId = 0
  let maxReservedId = 0
  let flushTimer: ReturnType<typeof setTimeout> | null = null

  const scheduleFlush = () => {
    if (flushTimer) return
    flushTimer = setTimeout(() => {
      flushTimer = null
      void flush()
    }, FLUSH_INTERVAL_MS)
  }

  const reserveIds = async (minCount: number) => {
    const reserveCount = Math.max(RESERVE_BATCH, minCount)
    const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
    const startId = newMax - reserveCount + 1
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      nextEventId = startId
      maxReservedId = newMax
    }
  }

  let flushPromise: Promise<void> | null = null
  let closed = false
  const inflightWrites = new Set<Promise<ExecutionEventEntry>>()

  const doFlush = async () => {
    if (pending.length === 0) return
    const batch = pending
    pending = []
    try {
      const key = getEventsKey(executionId)
      const zaddArgs: (string | number)[] = []
      for (const entry of batch) {
        zaddArgs.push(entry.eventId, JSON.stringify(entry))
      }
      const pipeline = redis.pipeline()
      pipeline.zadd(key, ...zaddArgs)
      pipeline.expire(key, TTL_SECONDS)
      pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
      pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
      await pipeline.exec()
    } catch (error) {
      logger.warn('Failed to flush execution events', {
        executionId,
        batchSize: batch.length,
        error: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : undefined,
      })
      pending = batch.concat(pending)
    }
  }

  const flush = async () => {
    if (flushPromise) {
      await flushPromise
      return
    }
    flushPromise = doFlush()
    try {
      await flushPromise
    } finally {
      flushPromise = null
      if (pending.length > 0) scheduleFlush()
    }
  }

  const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
    if (closed) return { eventId: 0, executionId, event }
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      await reserveIds(1)
    }
    const eventId = nextEventId++
    const entry: ExecutionEventEntry = { eventId, executionId, event }
    pending.push(entry)
    if (pending.length >= FLUSH_MAX_BATCH) {
      await flush()
    } else {
      scheduleFlush()
    }
    return entry
  }

  const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
    const p = writeCore(event)
    inflightWrites.add(p)
    const remove = () => inflightWrites.delete(p)
    p.then(remove, remove)
    return p
  }

  const close = async () => {
    closed = true
    if (flushTimer) {
      clearTimeout(flushTimer)
      flushTimer = null
    }
    if (inflightWrites.size > 0) {
      await Promise.allSettled(inflightWrites)
    }
    if (flushPromise) {
      await flushPromise
    }
    if (pending.length > 0) {
      await doFlush()
    }
  }

  return { write, flush, close }
}
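
A minimal end-to-end sketch of how this buffer might be driven, assuming Redis is configured; the event payload shape here is illustrative and not the real ExecutionEvent type.

import {
  createExecutionEventWriter,
  readExecutionEvents,
  setExecutionMeta,
} from '@/lib/execution/event-buffer'

async function demo(executionId: string) {
  await setExecutionMeta(executionId, { status: 'active', workflowId: 'wf_1' })

  const writer = createExecutionEventWriter(executionId)
  // Writes are batched in memory and flushed to a Redis sorted set,
  // scored by a monotonically increasing eventId.
  await writer.write({ type: 'execution:started' } as any) // shape illustrative
  await writer.close() // drains in-flight writes and pending batches

  // A reconnecting client replays everything after its last-seen eventId.
  const missed = await readExecutionEvents(executionId, 0)
  console.log(missed.map((e) => e.eventId))

  await setExecutionMeta(executionId, { status: 'complete' })
}
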
@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
    })
  })

  describe('Trigger Config Normalization (False Positive Prevention)', () => {
    it.concurrent(
      'should not detect change when deployed has null fields but current has values from triggerConfig',
      () => {
        // Core scenario: deployed state has null individual fields, current state has
        // values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                botToken: { id: 'botToken', type: 'short-input', value: null },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent(
      'should detect change when user edits a trigger field to a different value',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'old-secret' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'old-secret' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
      }
    )

    it.concurrent('should not detect change when both sides have no triggerConfig', () => {
      const deployedState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
            },
          }),
        },
      })

      const currentState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
            },
          }),
        },
      })

      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
    })

    it.concurrent(
      'should not detect change when deployed has empty fields and triggerConfig populates them',
      () => {
        // Empty string is also treated as "empty" by normalizeTriggerConfigValues
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent('should not detect change when triggerId differs', () => {
      const deployedState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              model: { value: 'gpt-4' },
              triggerId: { value: null },
            },
          }),
        },
      })

      const currentState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              model: { value: 'gpt-4' },
              triggerId: { value: 'slack_webhook' },
            },
          }),
        },
      })

      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
    })

    it.concurrent(
      'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                model: { value: 'gpt-4' },
                samplePayload_slack_webhook: { value: 'old payload' },
                triggerInstructions_slack_webhook: { value: 'old instructions' },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                model: { value: 'gpt-4' },
                samplePayload_slack_webhook: { value: 'new payload' },
                triggerInstructions_slack_webhook: { value: 'new instructions' },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent(
      'should handle mixed scenario: some fields from triggerConfig, some user-edited',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                botToken: { id: 'botToken', type: 'short-input', value: null },
                includeFiles: { id: 'includeFiles', type: 'switch', value: false },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
                includeFiles: { id: 'includeFiles', type: 'switch', value: true },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        // includeFiles changed from false to true — this IS a real change
        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
      }
    )
  })

  describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
    it.concurrent('should not detect change when webhookId differs', () => {
      const deployedState = createWorkflowState({
@@ -9,6 +9,7 @@ import {
  normalizeLoop,
  normalizeParallel,
  normalizeSubBlockValue,
  normalizeTriggerConfigValues,
  normalizeValue,
  normalizeVariables,
  sanitizeVariable,
@@ -172,14 +173,18 @@ export function generateWorkflowDiffSummary(
    }
  }

  // Normalize trigger config values for both states before comparison
  const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
  const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)

  // Compare subBlocks using shared helper for filtering (single source of truth)
  const allSubBlockIds = filterSubBlockIds([
    ...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
    ...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
  ])

  for (const subId of allSubBlockIds) {
    const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
    const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
    const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
    const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined

    if (!currentSub || !previousSub) {
      changes.push({
@@ -4,10 +4,12 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
  filterSubBlockIds,
  normalizedStringify,
  normalizeEdge,
  normalizeLoop,
  normalizeParallel,
  normalizeTriggerConfigValues,
  normalizeValue,
  sanitizeInputFormat,
  sanitizeTools,
@@ -584,4 +586,214 @@ describe('Workflow Normalization Utilities', () => {
      expect(result2).toBe(result3)
    })
  })

  describe('filterSubBlockIds', () => {
    it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
      const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['botToken', 'signingSecret'])
    })

    it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
      const ids = [
        'signingSecret',
        'samplePayload_slack_webhook',
        'triggerInstructions_slack_webhook',
        'webhookUrlDisplay_slack_webhook',
        'botToken',
      ]
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['botToken', 'signingSecret'])
    })

    it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
      const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['signingSecret'])
    })

    it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
      const ids = ['mySamplePayload', 'notSamplePayload']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
    })

    it.concurrent('should return sorted results', () => {
      const ids = ['zebra', 'alpha', 'middle']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['alpha', 'middle', 'zebra'])
    })

    it.concurrent('should handle empty array', () => {
      expect(filterSubBlockIds([])).toEqual([])
    })

    it.concurrent('should handle all IDs being excluded', () => {
      const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual([])
    })

    it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
      const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['realField'])
    })

    it.concurrent('should exclude triggerCredentials namespaced variants', () => {
      const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['signingSecret'])
    })
  })

  describe('normalizeTriggerConfigValues', () => {
    it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
      const subBlocks = {
        signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
        botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result).toEqual(subBlocks)
    })

    it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
      const subBlocks = {
        triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result).toEqual(subBlocks)
    })

    it.concurrent(
      'should return subBlocks unchanged when triggerConfig value is not an object',
      () => {
        const subBlocks = {
          triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
          signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        }
        const result = normalizeTriggerConfigValues(subBlocks)
        expect(result).toEqual(subBlocks)
      }
    )

    it.concurrent('should populate null individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123', botToken: 'token456' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        botToken: { id: 'botToken', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
      expect((result.botToken as Record<string, unknown>).value).toBe('token456')
    })

    it.concurrent('should populate undefined individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
    })

    it.concurrent('should populate empty string individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
    })

    it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'old-secret' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
    })

    it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: null, botToken: undefined },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        botToken: { id: 'botToken', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
      expect((result.botToken as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { nonExistentField: 'value123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result.nonExistentField).toBeUndefined()
      expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should not mutate the original subBlocks object', () => {
      const original = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      normalizeTriggerConfigValues(original)
      expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should preserve other subBlock properties when populating value', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: {
          id: 'signingSecret',
          type: 'short-input',
          value: null,
          placeholder: 'Enter signing secret',
        },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      const normalized = result.signingSecret as Record<string, unknown>
      expect(normalized.value).toBe('secret123')
      expect(normalized.id).toBe('signingSecret')
      expect(normalized.type).toBe('short-input')
      expect(normalized.placeholder).toBe('Enter signing secret')
    })
  })
})
@@ -418,10 +418,48 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
 */
export function filterSubBlockIds(subBlockIds: string[]): string[] {
  return subBlockIds
    .filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
    .filter((id) => {
      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
      if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
        return false
      return true
    })
    .sort()
}

/**
 * Normalizes trigger block subBlocks by populating null/empty individual fields
 * from the triggerConfig aggregate subBlock. This compensates for the runtime
 * population done by populateTriggerFieldsFromConfig, ensuring consistent
 * comparison between client state (with populated values) and deployed state
 * (with null values from DB).
 */
export function normalizeTriggerConfigValues(
  subBlocks: Record<string, unknown>
): Record<string, unknown> {
  const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
  const triggerConfigValue = triggerConfigSub?.value
  if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
    return subBlocks
  }

  const result = { ...subBlocks }
  for (const [fieldId, configValue] of Object.entries(
    triggerConfigValue as Record<string, unknown>
  )) {
    if (configValue === null || configValue === undefined) continue
    const existingSub = result[fieldId] as Record<string, unknown> | undefined
    if (
      existingSub &&
      (existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
    ) {
      result[fieldId] = { ...existingSub, value: configValue }
    }
  }

  return result
}

/**
 * Normalizes a subBlock value with sanitization for specific subBlock types.
 * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
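
Concretely, the normalization behaves like this (values are illustrative): an empty individual field is back-filled from the triggerConfig aggregate, while a user-edited field is left alone.

const subBlocks = {
  triggerConfig: {
    id: 'triggerConfig',
    type: 'short-input',
    value: { signingSecret: 's1', botToken: 't1' },
  },
  signingSecret: { id: 'signingSecret', type: 'short-input', value: null }, // empty, back-filled to 's1'
  botToken: { id: 'botToken', type: 'short-input', value: 'user-edited' }, // non-empty, untouched
}
const normalized = normalizeTriggerConfigValues(subBlocks)
// (normalized.signingSecret as Record<string, unknown>).value === 's1'
// (normalized.botToken as Record<string, unknown>).value === 'user-edited'
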
@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
    })
  },

  setCurrentExecutionId: (workflowId, executionId) => {
    set({
      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
        currentExecutionId: executionId,
      }),
    })
  },

  getCurrentExecutionId: (workflowId) => {
    return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
  },

  clearRunPath: (workflowId) => {
    set({
      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
  lastRunPath: Map<string, BlockRunStatus>
  /** Maps edge IDs to their run result from the last execution */
  lastRunEdges: Map<string, EdgeRunStatus>
  /** The execution ID of the currently running execution */
  currentExecutionId: string | null
}

/**
@@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
  debugContext: null,
  lastRunPath: new Map(),
  lastRunEdges: new Map(),
  currentExecutionId: null,
}

/**
@@ -96,6 +99,10 @@ export interface ExecutionActions {
  setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
  /** Clears the run path and run edges for a workflow */
  clearRunPath: (workflowId: string) => void
  /** Stores the current execution ID for a workflow */
  setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
  /** Returns the current execution ID for a workflow */
  getCurrentExecutionId: (workflowId: string) => string | null
  /** Resets the entire store to its initial empty state */
  reset: () => void
  /** Stores a serializable execution snapshot for a workflow */
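
A short sketch of the intended lifecycle for the new currentExecutionId state. The caller names and timing here are assumptions, not part of this diff.

import { useExecutionStore } from '@/stores/execution' // illustrative path

function onStreamStarted(workflowId: string, executionId: string) {
  // Record the server-issued ID as soon as it is known, so a later
  // cancel or reconnect can address the right execution.
  useExecutionStore.getState().setCurrentExecutionId(workflowId, executionId)
}

function onStreamFinished(workflowId: string) {
  useExecutionStore.getState().setCurrentExecutionId(workflowId, null)
}
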
@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(

        const newEntry = get().entries[0]

        if (newEntry?.error) {
        if (newEntry?.error && newEntry.blockType !== 'cancelled') {
          notifyBlockError({
            error: newEntry.error,
            blockName: newEntry.blockName || 'Unknown Block',
@@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
        useExecutionStore.getState().clearRunPath(workflowId)
      },

      clearExecutionEntries: (executionId: string) =>
        set((state) => ({
          entries: state.entries.filter((e) => e.executionId !== executionId),
        })),

      exportConsoleCSV: (workflowId: string) => {
        const entries = get().entries.filter((entry) => entry.workflowId === workflowId)

@@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
      },
      merge: (persistedState, currentState) => {
        const persisted = persistedState as Partial<ConsoleStore> | undefined
        const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
        const rawEntries = persisted?.entries ?? currentState.entries
        const oneHourAgo = Date.now() - 60 * 60 * 1000

        const entries = rawEntries.map((entry, index) => {
          let updated = entry
          if (entry.executionOrder === undefined) {
            return { ...entry, executionOrder: index + 1 }
            updated = { ...updated, executionOrder: index + 1 }
          }
          return entry
          if (
            entry.isRunning &&
            entry.startedAt &&
            new Date(entry.startedAt).getTime() < oneHourAgo
          ) {
            updated = { ...updated, isRunning: false }
          }
          return updated
        })

        return {
          ...currentState,
          entries,
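
One plausible consumer of the new clearExecutionEntries action (this wiring is an assumption, not shown in the diff): dropping an execution's already-rendered console rows before a reconnect replays its buffered events, so entries are not duplicated.

import { useTerminalConsoleStore } from '@/stores/terminal-console' // illustrative path

function beforeReplay(executionId: string) {
  // Remove rows already rendered for this execution; the replayed
  // event stream repopulates them from the first buffered event.
  useTerminalConsoleStore.getState().clearExecutionEntries(executionId)
}
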
@@ -51,6 +51,7 @@ export interface ConsoleStore {
  isOpen: boolean
  addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
  clearWorkflowConsole: (workflowId: string) => void
  clearExecutionEntries: (executionId: string) => void
  exportConsoleCSV: (workflowId: string) => void
  getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
  toggleConsole: () => void
@@ -23,7 +23,12 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
 * with default values from the trigger definition on load, which aren't present in
 * the deployed state, causing false positive change detection.
 */
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
  'webhookId',
  'triggerPath',
  'triggerConfig',
  'triggerId',
]

/**
 * Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.
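
The combined effect of adding 'triggerId' to this list and the prefix matching in filterSubBlockIds, on illustrative inputs:

// 'triggerId' is excluded as runtime metadata, and 'samplePayload_slack_webhook'
// is excluded by prefix match on the system ID 'samplePayload'; only the
// user-editable field survives.
const ids = ['signingSecret', 'triggerId', 'samplePayload_slack_webhook']
console.log(filterSubBlockIds(ids)) // ['signingSecret']
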