improvement(executor): redesign executor + add start block (#1790)

* fix(billing): should allow restoring subscription (#1728)

* fix(already-cancelled-sub): UI should allow restoring subscription

* restore functionality fixed

* fix

* improvement(start): revert to start block

* make it work with start block

* fix start block persistence

* cleanup triggers

* debounce status checks

* update docs

* improvement(start): revert to start block

* make it work with start block

* fix start block persistence

* cleanup triggers

* debounce status checks

* update docs

* SSE v0.1

* v0.2

* v0.3

* v0.4

* v0.5

* v0.6

* broken checkpoint

* Executor progress - everything preliminarily tested except while loops and triggers

* Executor fixes

* Fix var typing

* Implement while loop execution

* Loop and parallel result agg

* Refactor v1 - loops work

* Fix var resolution in for each loop

* Fix while loop condition and variable resolution

* Fix loop iteration counts

* Fix loop badges

* Clean logs

* Fix variable references from start block

* Fix condition block

* Fix conditional convergence

* Don't execute orphaned nodes

* Code cleanup 1 and error surfacing

* compile time try catch

* Some fixes

* Fix error throwing

* Sentinels v1

* Fix multiple start and end nodes in loop

* Edge restoration

* Fix reachable nodes execution

* Parallel subflows

* Fix loop/parallel sentinel convergence

* Loops and parallels orchestrator

* Split executor

* Variable resolution split

* Dag phase

* Refactor

* Refactor

* Refactor 3

* Lint + refactor

* Lint + cleanup + refactor

* Readability

* Initial logs

* Fix trace spans

* Console pills for iters

* Add input/output pills

* Checkpoint

* remove unused code

* THIS IS THE COMMIT THAT CAN BREAK A LOT OF THINGS

* ANOTHER BIG REFACTOR

* Lint + fix tests

* Fix webhook

* Remove comment

* Merge stash

* Fix triggers?

* Stuff

* Fix error port

* Lint

* Consolidate state

* Clean up some var resolution

* Remove some var resolution logs

* Fix chat

* Fix chat triggers

* Fix chat trigger fully

* Snapshot refactor

* Fix mcp and custom tools

* Lint

* Fix parallel default count and trace span overlay

* Agent purple

* Fix test

* Fix test

---------

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
This commit is contained in:
Siddharth Ganesan
2025-11-02 12:21:16 -08:00
committed by GitHub
parent 7d67ae397d
commit 3bf00cbd2a
137 changed files with 8552 additions and 20440 deletions

View File

@@ -8,7 +8,6 @@ import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import {
type BlockState,
calculateNextRunTime as calculateNextTime,
@@ -17,9 +16,9 @@ import {
} from '@/lib/schedules/utils'
import { decryptSecret } from '@/lib/utils'
import { blockExistsInDeployment, loadDeployedWorkflowState } from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import { Executor } from '@/executor'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import { Serializer } from '@/serializer'
import { RateLimiter } from '@/services/queue'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
@@ -336,44 +335,38 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
variables: variables || {},
})
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: input,
workflowVariables,
contextExtensions: {
executionId,
workspaceId: workflowRecord.workspaceId || '',
isDeployedContext: true,
},
const metadata: ExecutionMetadata = {
requestId,
executionId,
workflowId: payload.workflowId,
workspaceId: workflowRecord.workspaceId || '',
userId: actorUserId,
triggerType: 'schedule',
triggerBlockId: payload.blockId || undefined,
useDraftState: false,
startTime: new Date().toISOString(),
}
const snapshot = new ExecutionSnapshot(
metadata,
workflowRecord,
input,
{},
workflowRecord.variables || {},
[]
)
const executionResult = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
})
loggingSession.setupExecutor(executor)
const result = await executor.execute(payload.workflowId, payload.blockId || undefined)
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Workflow execution completed: ${payload.workflowId}`, {
success: executionResult.success,
executionTime: executionResult.metadata?.duration,
})
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
}
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: (traceSpans || []) as any,
})
return { success: executionResult.success, blocks, executionResult }
} catch (earlyError: any) {
logger.error(

View File

@@ -17,8 +17,9 @@ import {
loadDeployedWorkflowState,
loadWorkflowFromNormalizedTables,
} from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { Executor } from '@/executor'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionResult } from '@/executor/types'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
@@ -257,51 +258,44 @@ async function executeWebhookJobInternal(
if (airtableInput) {
logger.info(`[${requestId}] Executing workflow with Airtable changes`)
// Create executor and execute (same as standard webhook flow)
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: airtableInput,
workflowVariables,
contextExtensions: {
executionId,
workspaceId: workspaceId || '',
isDeployedContext: !payload.testMode,
},
// Get workflow for core execution
const workflow = await getWorkflowById(payload.workflowId)
if (!workflow) {
throw new Error(`Workflow ${payload.workflowId} not found`)
}
const metadata: ExecutionMetadata = {
requestId,
executionId,
workflowId: payload.workflowId,
workspaceId,
userId: payload.userId,
triggerType: 'webhook',
triggerBlockId: payload.blockId,
useDraftState: false,
startTime: new Date().toISOString(),
}
const snapshot = new ExecutionSnapshot(
metadata,
workflow,
airtableInput,
{},
workflow.variables || {},
[]
)
const executionResult = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Check if we got a StreamingExecution result
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Airtable webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
workflowInput: airtableInput,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
@@ -448,30 +442,40 @@ async function executeWebhookJobInternal(
}
}
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: input || {},
workflowVariables,
contextExtensions: {
executionId,
workspaceId: workspaceId || '',
isDeployedContext: !payload.testMode,
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
logger.info(`[${requestId}] Executing workflow for ${payload.provider} webhook`)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Get workflow for core execution
const workflow = await getWorkflowById(payload.workflowId)
if (!workflow) {
throw new Error(`Workflow ${payload.workflowId} not found`)
}
// Check if we got a StreamingExecution result
const executionResult = 'stream' in result && 'execution' in result ? result.execution : result
const metadata: ExecutionMetadata = {
requestId,
executionId,
workflowId: payload.workflowId,
workspaceId,
userId: payload.userId,
triggerType: 'webhook',
triggerBlockId: payload.blockId,
useDraftState: false,
startTime: new Date().toISOString(),
}
const snapshot = new ExecutionSnapshot(
metadata,
workflow,
input || {},
{},
workflow.variables || {},
[]
)
const executionResult = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
})
logger.info(`[${requestId}] Webhook execution completed`, {
success: executionResult.success,
@@ -479,22 +483,6 @@ async function executeWebhookJobInternal(
provider: payload.provider,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
workflowInput: input,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,

View File

@@ -3,18 +3,11 @@ import { workflow as workflowTable } from '@sim/db/schema'
import { task } from '@trigger.dev/sdk'
import { eq } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import { checkServerSideUsageLimits } from '@/lib/billing'
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { decryptSecret } from '@/lib/utils'
import { loadDeployedWorkflowState } from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { filterEdgesFromTriggerBlocks } from '@/app/workspace/[workspaceId]/w/[workflowId]/lib/workflow-execution-utils'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { getWorkflowById } from '@/lib/workflows/utils'
import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot'
const logger = createLogger('TriggerWorkflowExecution')
@@ -31,7 +24,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)
logger.info(`[${requestId}] Starting workflow execution: ${workflowId}`, {
logger.info(`[${requestId}] Starting workflow execution job: ${workflowId}`, {
userId: payload.userId,
triggerType: payload.triggerType,
executionId,
@@ -42,46 +35,13 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
const loggingSession = new LoggingSession(workflowId, executionId, triggerType, requestId)
try {
const usageCheck = await checkServerSideUsageLimits(payload.userId)
if (usageCheck.isExceeded) {
logger.warn(
`[${requestId}] User ${payload.userId} has exceeded usage limits. Skipping workflow execution.`,
{
currentUsage: usageCheck.currentUsage,
limit: usageCheck.limit,
workflowId: payload.workflowId,
}
)
throw new Error(
usageCheck.message ||
'Usage limit exceeded. Please upgrade your plan to continue using workflows.'
)
// Load workflow from database
const workflow = await getWorkflowById(workflowId)
if (!workflow) {
throw new Error(`Workflow ${workflowId} not found`)
}
// Load workflow data from deployed state (this task is only used for API executions right now)
const workflowData = await loadDeployedWorkflowState(workflowId)
const { blocks, edges, loops, parallels } = workflowData
// Merge subblock states (server-safe version doesn't need workflowId)
const mergedStates = mergeSubblockState(blocks, {})
// Process block states for execution
const processedBlockStates = Object.entries(mergedStates).reduce(
(acc, [blockId, blockState]) => {
acc[blockId] = Object.entries(blockState.subBlocks).reduce(
(subAcc, [key, subBlock]) => {
subAcc[key] = subBlock.value
return subAcc
},
{} as Record<string, any>
)
return acc
},
{} as Record<string, Record<string, any>>
)
// Get environment variables with workspace precedence
// Get workspace ID for the workflow
const wfRows = await db
.select({ workspaceId: workflowTable.workspaceId })
.from(workflowTable)
@@ -89,116 +49,57 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
.limit(1)
const workspaceId = wfRows[0]?.workspaceId || undefined
const { personalEncrypted, workspaceEncrypted } = await getPersonalAndWorkspaceEnv(
payload.userId,
workspaceId
)
const mergedEncrypted = { ...personalEncrypted, ...workspaceEncrypted }
const decryptionPromises = Object.entries(mergedEncrypted).map(async ([key, encrypted]) => {
const { decrypted } = await decryptSecret(encrypted)
return [key, decrypted] as const
})
const decryptedPairs = await Promise.all(decryptionPromises)
const decryptedEnvVars: Record<string, string> = Object.fromEntries(decryptedPairs)
// Start logging session
await loggingSession.safeStart({
userId: payload.userId,
workspaceId: workspaceId || '',
variables: decryptedEnvVars,
})
// Filter out edges between trigger blocks - triggers are independent entry points
const filteredEdges = filterEdgesFromTriggerBlocks(mergedStates, edges)
// Create serialized workflow
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(
mergedStates,
filteredEdges,
loops || {},
parallels || {},
true // Enable validation during execution
)
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: payload.input || {},
workflowVariables: {},
contextExtensions: {
executionId,
workspaceId: workspaceId || '',
isDeployedContext: true,
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
const result = await executor.execute(workflowId)
// Handle streaming vs regular result
const executionResult = 'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: executionResult.success,
executionTime: executionResult.metadata?.duration,
const metadata: ExecutionMetadata = {
requestId,
executionId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(workflowId)
workflowId,
workspaceId,
userId: payload.userId,
triggerType: payload.triggerType || 'api',
useDraftState: false,
startTime: new Date().toISOString(),
}
// Build trace spans and complete logging session (for both success and failure)
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
const snapshot = new ExecutionSnapshot(
metadata,
workflow,
payload.input,
{},
workflow.variables || {},
[]
)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
const result = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
})
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: result.success,
executionTime: result.metadata?.duration,
executionId,
})
return {
success: executionResult.success,
success: result.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
output: result.output,
executedAt: new Date().toISOString(),
metadata: payload.metadata,
}
} catch (error: any) {
logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, {
error: error.message,
stack: error.stack,
executionId,
})
const executionResult = error?.executionResult || { success: false, output: {}, logs: [] }
const { traceSpans } = buildTraceSpans(executionResult)
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Workflow execution failed',
stackTrace: error.stack,
},
traceSpans,
})
throw error
}
}
export const workflowExecution = task({
// Trigger.dev task definition
export const workflowExecutionTask = task({
id: 'workflow-execution',
retry: {
maxAttempts: 1,
},
run: async (payload: WorkflowExecutionPayload) => executeWorkflowJob(payload),
run: executeWorkflowJob,
})