Checkpoint

This commit is contained in:
Siddharth Ganesan
2026-02-24 12:22:19 -08:00
parent 03908edcbb
commit 134c4c4f2a
7 changed files with 153 additions and 46 deletions

View File

@@ -38,6 +38,13 @@ export async function executeToolAndReport(
if (wasToolResultSeen(toolCall.id)) return
toolCall.status = 'executing'
logger.info('Tool execution started', {
toolCallId: toolCall.id,
toolName: toolCall.name,
params: toolCall.params,
})
try {
const result = await executeToolServerSide(toolCall, execContext)
toolCall.status = result.success ? 'success' : 'error'
@@ -45,6 +52,20 @@ export async function executeToolAndReport(
toolCall.error = result.error
toolCall.endTime = Date.now()
if (result.success) {
logger.info('Tool execution succeeded', {
toolCallId: toolCall.id,
toolName: toolCall.name,
})
} else {
logger.warn('Tool execution failed', {
toolCallId: toolCall.id,
toolName: toolCall.name,
error: result.error,
params: toolCall.params,
})
}
// If create_workflow was successful, update the execution context with the new workflowId.
// This ensures subsequent tools in the same stream have access to the workflowId.
const output = asRecord(result.output)
@@ -101,6 +122,13 @@ export async function executeToolAndReport(
toolCall.error = error instanceof Error ? error.message : String(error)
toolCall.endTime = Date.now()
logger.error('Tool execution threw', {
toolCallId: toolCall.id,
toolName: toolCall.name,
error: toolCall.error,
params: toolCall.params,
})
markToolResultSeen(toolCall.id)
// Fire-and-forget (same reasoning as above).

View File

@@ -33,9 +33,13 @@ export async function executeVfsGrep(
)
const outputMode = (params.output_mode as string) ?? 'content'
const key = outputMode === 'files_with_matches' ? 'files' : outputMode === 'count' ? 'counts' : 'matches'
const matchCount = Array.isArray(result) ? result.length : typeof result === 'object' ? Object.keys(result).length : 0
logger.debug('vfs_grep result', { pattern, path: params.path, outputMode, matchCount })
return { success: true, output: { [key]: result } }
} catch (err) {
logger.error('vfs_grep failed', {
pattern,
path: params.path,
error: err instanceof Error ? err.message : String(err),
})
return { success: false, error: err instanceof Error ? err.message : 'vfs_grep failed' }
@@ -59,9 +63,11 @@ export async function executeVfsGlob(
try {
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
const files = vfs.glob(pattern)
logger.debug('vfs_glob result', { pattern, fileCount: files.length })
return { success: true, output: { files } }
} catch (err) {
logger.error('vfs_glob failed', {
pattern,
error: err instanceof Error ? err.message : String(err),
})
return { success: false, error: err instanceof Error ? err.message : 'vfs_glob failed' }
@@ -90,11 +96,18 @@ export async function executeVfsRead(
params.limit as number | undefined
)
if (!result) {
return { success: false, error: `File not found: ${path}` }
const suggestions = vfs.suggestSimilar(path)
logger.warn('vfs_read file not found', { path, suggestions })
const hint = suggestions.length > 0
? ` Did you mean: ${suggestions.join(', ')}?`
: ' Use glob to discover available paths.'
return { success: false, error: `File not found: ${path}.${hint}` }
}
logger.debug('vfs_read result', { path, totalLines: result.totalLines })
return { success: true, output: result }
} catch (err) {
logger.error('vfs_read failed', {
path,
error: err instanceof Error ? err.message : String(err),
})
return { success: false, error: err instanceof Error ? err.message : 'vfs_read failed' }
@@ -118,9 +131,11 @@ export async function executeVfsList(
try {
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
const entries = vfs.list(path)
logger.debug('vfs_list result', { path, entryCount: entries.length })
return { success: true, output: { entries } }
} catch (err) {
logger.error('vfs_list failed', {
path,
error: err instanceof Error ? err.message : String(err),
})
return { success: false, error: err instanceof Error ? err.message : 'vfs_list failed' }

View File

@@ -135,6 +135,25 @@ export function createBlockFromParams(
}
}
// Initialize default conditions/routes so edge handle validation works.
// The UI does this in the React component; we need to mirror it here.
if (params.type === 'condition' && !blockState.subBlocks.conditions?.value) {
blockState.subBlocks.conditions = {
id: 'conditions',
type: 'condition-input',
value: [
{ id: crypto.randomUUID(), title: 'If', value: '' },
{ id: crypto.randomUUID(), title: 'Else', value: '' },
],
}
} else if (params.type === 'router_v2' && !blockState.subBlocks.routes?.value) {
blockState.subBlocks.routes = {
id: 'routes',
type: 'router-input',
value: [{ id: crypto.randomUUID(), title: 'Route 1', value: '' }],
}
}
return blockState
}
@@ -228,6 +247,8 @@ const ARRAY_WITH_ID_SUBBLOCK_TYPES = new Set([
'tagFilters', // knowledge-tag-filters: Filters with id, tagName, etc.
'documentTags', // document-tag-entry: Tags with id, tagName, etc.
'metrics', // eval-input: Metrics with id, name, description, range
'conditions', // condition-input: Condition branches with id, title, value
'routes', // router-input: Router routes with id, title, value
])
/**
@@ -236,16 +257,27 @@ const ARRAY_WITH_ID_SUBBLOCK_TYPES = new Set([
* to be converted to proper UUIDs for consistency with UI-created items.
*/
export function normalizeArrayWithIds(value: unknown): any[] {
if (!Array.isArray(value)) {
let arr: any[]
if (Array.isArray(value)) {
arr = value
} else if (typeof value === 'string') {
try {
const parsed = JSON.parse(value)
if (!Array.isArray(parsed)) return []
arr = parsed
} catch {
return []
}
} else {
return []
}
return value.map((item: any) => {
return arr.map((item: any) => {
if (!item || typeof item !== 'object') {
return item
}
// Check if id is missing or not a valid UUID
const hasValidUUID = typeof item.id === 'string' && UUID_REGEX.test(item.id)
if (!hasValidUUID) {
return { ...item, id: crypto.randomUUID() }

View File

@@ -213,8 +213,8 @@ export function applyOperationsToWorkflowState(
handler(operation, ctx)
}
// Pass 2: Add all deferred connections from add/insert operations
// Now all blocks exist, so connections can be safely created
// Pass 2: Create all edges from deferred connections
// All blocks exist at this point, so forward references resolve correctly
if (ctx.deferredConnections.length > 0) {
logger.info('Processing deferred connections from add/insert operations', {
deferredConnectionCount: ctx.deferredConnections.length,

View File

@@ -8,7 +8,6 @@ import {
addConnectionsAsEdges,
applyTriggerConfigToBlockSubblocks,
createBlockFromParams,
createValidatedEdge,
filterDisallowedTools,
normalizeArrayWithIds,
normalizeResponseFormat,
@@ -78,7 +77,8 @@ export function handleDeleteOperation(op: EditWorkflowOperation, ctx: OperationC
}
export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationContext): void {
const { modifiedState, skippedItems, validationErrors, permissionConfig } = ctx
const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } =
ctx
const { block_id, params } = op
if (!modifiedState.blocks[block_id]) {
@@ -446,47 +446,13 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon
}
}
// Handle connections update (convert to edges)
// Defer connections to pass 2 so all blocks exist before edges are created
if (params?.connections) {
modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)
Object.entries(params.connections).forEach(([connectionType, targets]) => {
if (targets === null) return
const mapConnectionTypeToHandle = (type: string): string => {
if (type === 'success') return 'source'
if (type === 'error') return 'error'
return type
}
const sourceHandle = mapConnectionTypeToHandle(connectionType)
const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => {
createValidatedEdge(
modifiedState,
block_id,
targetBlock,
sourceHandle,
targetHandle || 'target',
'edit',
logger,
skippedItems
)
}
if (typeof targets === 'string') {
addEdgeForTarget(targets)
} else if (Array.isArray(targets)) {
targets.forEach((target: any) => {
if (typeof target === 'string') {
addEdgeForTarget(target)
} else if (target?.block) {
addEdgeForTarget(target.block, target.handle)
}
})
} else if (typeof targets === 'object' && (targets as any)?.block) {
addEdgeForTarget((targets as any).block, (targets as any).handle)
}
deferredConnections.push({
blockId: block_id,
connections: params.connections,
})
}

View File

@@ -254,3 +254,65 @@ export function list(files: Map<string, string>, path: string): DirEntry[] {
return a.name.localeCompare(b.name)
})
}
/**
 * Find VFS paths similar to a missing path.
 *
 * Handles two cases:
 * 1. Wrong filename: `components/blocks/gmail.json` → `gmail_v2.json`
 *    Matches by filename stem similarity within the same directory.
 * 2. Wrong directory: `workflows/Untitled/state.json` → `Untitled Workflow`
 *    Matches by parent directory name similarity with the same filename.
 *
 * @param files - VFS contents keyed by path
 * @param missingPath - The path that failed to resolve
 * @param max - Maximum number of suggestions to return (default 5)
 * @returns Up to `max` candidate paths, best match first
 */
export function suggestSimilar(
  files: Map<string, string>,
  missingPath: string,
  max = 5
): string[] {
  const segments = missingPath.split('/')
  const filename = segments[segments.length - 1].toLowerCase()
  const fileStem = filename.replace(/\.[^.]+$/, '')
  const parentDir = segments.length >= 2 ? segments[segments.length - 2].toLowerCase() : ''
  // Only claim a top-level directory when the path actually contains one.
  // String.split always yields at least one segment, so the previous
  // `segments.length >= 1` check was always true and turned a bare filename
  // like `state.json` into the bogus top dir `state.json/`, which could
  // spuriously match a VFS directory of the same name.
  const topDir = segments.length >= 2 ? segments[0] + '/' : ''
  const scored: Array<{ path: string; score: number }> = []
  for (const vfsPath of files.keys()) {
    const vfsSegments = vfsPath.split('/')
    const vfsFilename = vfsSegments[vfsSegments.length - 1].toLowerCase()
    const vfsStem = vfsFilename.replace(/\.[^.]+$/, '')
    const vfsParentDir =
      vfsSegments.length >= 2 ? vfsSegments[vfsSegments.length - 2].toLowerCase() : ''
    const sameTopDir = topDir !== '' && vfsPath.startsWith(topDir)
    // Same filename, different directory — the directory name is wrong.
    // e.g. workflows/Untitled/state.json vs workflows/Untitled Workflow/state.json
    if (vfsFilename === filename && vfsParentDir !== parentDir && sameTopDir) {
      if (vfsParentDir.includes(parentDir) || parentDir.includes(vfsParentDir)) {
        scored.push({ path: vfsPath, score: 95 })
        continue
      }
    }
    // Same directory, different filename — the filename is wrong.
    const sameDir =
      segments.length === vfsSegments.length &&
      segments.slice(0, -1).join('/') === vfsSegments.slice(0, -1).join('/')
    if (sameDir) {
      if (vfsStem === fileStem) {
        scored.push({ path: vfsPath, score: 100 })
      } else if (vfsStem.includes(fileStem) || fileStem.includes(vfsStem)) {
        scored.push({ path: vfsPath, score: 80 })
      } else if (vfsFilename.includes(fileStem.replace(/[_-]/g, ''))) {
        scored.push({ path: vfsPath, score: 60 })
      }
    } else if (sameTopDir && vfsStem === fileStem) {
      // Same top-level directory and matching stem but different depth/parent.
      scored.push({ path: vfsPath, score: 50 })
    }
  }
  // Stable sort (ES2019+) keeps Map insertion order among equal scores.
  scored.sort((a, b) => b.score - a.score)
  return scored.slice(0, max).map((s) => s.path)
}

View File

@@ -208,6 +208,10 @@ export class WorkspaceVFS {
return ops.list(this.files, path)
}
suggestSimilar(missingPath: string, max?: number): string[] {
return ops.suggestSimilar(this.files, missingPath, max)
}
/**
* Materialize all workflows in the workspace.
*/