Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-12 07:24:55 -05:00)

Compare commits: 4 commits, feat/smart ... sim-609-to

| Author | SHA1 | Date |
|---|---|---|
| | a0ebe0842b | |
| | d236cc8ad0 | |
| | 81dfeb0bb0 | |
| | 01577a18b4 | |
CopilotAutoAllowedToolsAPI route (/api/copilot/auto-allowed-tools):

@@ -1,81 +1,145 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
-import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
-
-const logger = createLogger('CopilotAutoAllowedToolsAPI')
-
-function copilotHeaders(): HeadersInit {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-  return headers
-}
-
-export async function DELETE(request: NextRequest) {
-  const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
-  if (!isAuthenticated || !userId) {
-    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-  }
-
-  const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
-  const toolIdFromBody = await request
-    .json()
-    .then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
-    .catch(() => undefined)
-  const toolId = toolIdFromBody || toolIdFromQuery
-  if (!toolId) {
-    return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
-  }
-
-  try {
-    const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'DELETE',
-      headers: copilotHeaders(),
-      body: JSON.stringify({
-        userId,
-        toolId,
-      }),
-    })
-
-    const payload = await res.json().catch(() => ({}))
-    if (!res.ok) {
-      logger.warn('Failed to remove auto-allowed tool via copilot backend', {
-        status: res.status,
-        userId,
-        toolId,
-      })
-      return NextResponse.json(
-        {
-          success: false,
-          error: payload?.error || 'Failed to remove auto-allowed tool',
-          autoAllowedTools: [],
-        },
-        { status: res.status }
-      )
-    }
-
-    return NextResponse.json({
-      success: true,
-      autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
-    })
-  } catch (error) {
-    logger.error('Error removing auto-allowed tool', {
-      userId,
-      toolId,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return NextResponse.json(
-      {
-        success: false,
-        error: 'Failed to remove auto-allowed tool',
-        autoAllowedTools: [],
-      },
-      { status: 500 }
-    )
-  }
-}
+import { db } from '@sim/db'
+import { settings } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+
+const logger = createLogger('CopilotAutoAllowedToolsAPI')
+
+/**
+ * GET - Fetch user's auto-allowed integration tools
+ */
+export async function GET() {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+
+    const [userSettings] = await db
+      .select()
+      .from(settings)
+      .where(eq(settings.userId, userId))
+      .limit(1)
+
+    if (userSettings) {
+      const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
+      return NextResponse.json({ autoAllowedTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [],
+    })
+
+    return NextResponse.json({ autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to fetch auto-allowed tools', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * POST - Add a tool to the auto-allowed list
+ */
+export async function POST(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const body = await request.json()
+
+    if (!body.toolId || typeof body.toolId !== 'string') {
+      return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
+    }
+
+    const toolId = body.toolId
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+
+      if (!currentTools.includes(toolId)) {
+        const updatedTools = [...currentTools, toolId]
+        await db
+          .update(settings)
+          .set({
+            copilotAutoAllowedTools: updatedTools,
+            updatedAt: new Date(),
+          })
+          .where(eq(settings.userId, userId))
+
+        logger.info('Added tool to auto-allowed list', { userId, toolId })
+        return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+      }
+
+      return NextResponse.json({ success: true, autoAllowedTools: currentTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [toolId],
+    })
+
+    logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
+    return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
+  } catch (error) {
+    logger.error('Failed to add auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * DELETE - Remove a tool from the auto-allowed list
+ */
+export async function DELETE(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const { searchParams } = new URL(request.url)
+    const toolId = searchParams.get('toolId')
+
+    if (!toolId) {
+      return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
+    }
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+      const updatedTools = currentTools.filter((t) => t !== toolId)
+
+      await db
+        .update(settings)
+        .set({
+          copilotAutoAllowedTools: updatedTools,
+          updatedAt: new Date(),
+        })
+        .where(eq(settings.userId, userId))
+
+      logger.info('Removed tool from auto-allowed list', { userId, toolId })
+      return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+    }
+
+    return NextResponse.json({ success: true, autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to remove auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
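For orientation, a minimal client-side sketch of how the db-backed handlers above could be called. The endpoint shapes (GET returning { autoAllowedTools }, POST taking { toolId } in the body, DELETE taking toolId as a query parameter) come from the handlers in this hunk; the helper names themselves are illustrative, not part of the codebase.

```ts
// Illustrative helpers only; the endpoint shapes come from the route handlers above.
async function fetchAutoAllowedTools(): Promise<string[]> {
  const res = await fetch('/api/copilot/auto-allowed-tools')
  if (!res.ok) return []
  const data = (await res.json()) as { autoAllowedTools?: string[] }
  return Array.isArray(data.autoAllowedTools) ? data.autoAllowedTools : []
}

async function addAutoAllowedTool(toolId: string): Promise<string[]> {
  const res = await fetch('/api/copilot/auto-allowed-tools', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolId }),
  })
  const data = (await res.json()) as { autoAllowedTools?: string[] }
  return Array.isArray(data.autoAllowedTools) ? data.autoAllowedTools : []
}

async function removeAutoAllowedTool(toolId: string): Promise<string[]> {
  // The DELETE handler reads toolId from the query string.
  const res = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolId)}`, {
    method: 'DELETE',
  })
  const data = (await res.json()) as { autoAllowedTools?: string[] }
  return Array.isArray(data.autoAllowedTools) ? data.autoAllowedTools : []
}
```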
CopilotConfirmAPI route (POST /api/copilot/confirm):

@@ -1,11 +1,7 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
-import {
-  REDIS_TOOL_CALL_PREFIX,
-  REDIS_TOOL_CALL_TTL_SECONDS,
-  SIM_AGENT_API_URL,
-} from '@/lib/copilot/constants'
+import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -14,7 +10,6 @@ import {
   createUnauthorizedResponse,
   type NotificationStatus,
 } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
 import { getRedisClient } from '@/lib/core/config/redis'
 
 const logger = createLogger('CopilotConfirmAPI')
@@ -26,8 +21,6 @@ const ConfirmationSchema = z.object({
     errorMap: () => ({ message: 'Invalid notification status' }),
   }),
   message: z.string().optional(), // Optional message for background moves or additional context
-  toolName: z.string().optional(),
-  remember: z.boolean().optional(),
 })
 
 /**
@@ -64,44 +57,6 @@ async function updateToolCallStatus(
   }
 }
 
-async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-
-  try {
-    const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'POST',
-      headers,
-      body: JSON.stringify({
-        userId,
-        toolId: toolName,
-      }),
-    })
-
-    if (!response.ok) {
-      logger.warn('Failed to persist auto-allowed tool preference', {
-        userId,
-        toolName,
-        status: response.status,
-      })
-      return false
-    }
-
-    return true
-  } catch (error) {
-    logger.error('Error persisting auto-allowed tool preference', {
-      userId,
-      toolName,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return false
-  }
-}
-
 /**
  * POST /api/copilot/confirm
  * Update tool call status (Accept/Reject)
@@ -119,7 +74,7 @@ export async function POST(req: NextRequest) {
     }
 
     const body = await req.json()
-    const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
+    const { toolCallId, status, message } = ConfirmationSchema.parse(body)
 
     // Update the tool call status in Redis
     const updated = await updateToolCallStatus(toolCallId, status, message)
@@ -135,22 +90,14 @@ export async function POST(req: NextRequest) {
       return createBadRequestResponse('Failed to update tool call status or tool call not found')
     }
 
-    let rememberSaved = false
-    if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
-      rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
-    }
-
-    const response: Record<string, unknown> = {
+    const duration = tracker.getDuration()
+
+    return NextResponse.json({
       success: true,
       message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
       toolCallId,
       status,
-    }
-    if (remember === true) {
-      response.rememberSaved = rememberSaved
-    }
-
-    return NextResponse.json(response)
+    })
   } catch (error) {
     const duration = tracker.getDuration()
 
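A sketch of what the schema change above means for callers of POST /api/copilot/confirm: the removed fields let a client ask the server to remember an accepted tool as auto-allowed, while the remaining schema only reports the accept/reject decision. Field names come from ConfirmationSchema; the values below are made up.

```ts
// Illustrative request bodies; values are invented, field names come from ConfirmationSchema.
const withRemember = {
  toolCallId: 'tc_123',        // id of the pending tool call (illustrative)
  status: 'accepted',          // NotificationStatus
  message: 'Approved by user', // optional
  toolName: 'example_tool',    // optional (removed field): which tool to remember
  remember: true,              // optional (removed field): persist as auto-allowed
}

const decisionOnly = {
  toolCallId: 'tc_123',
  status: 'accepted',
  message: 'Approved by user',
}

async function confirmToolCall(body: typeof decisionOnly | typeof withRemember) {
  await fetch('/api/copilot/confirm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
}
```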
@@ -29,7 +29,7 @@ const patchBodySchema
     description: z
       .string()
       .trim()
-      .max(500, 'Description must be 500 characters or less')
+      .max(2000, 'Description must be 2000 characters or less')
       .nullable()
      .optional(),
     isActive: z.literal(true).optional(), // Set to true to activate this version
|
|||||||
@@ -12,7 +12,7 @@ import {
|
|||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
|
||||||
import { processInputFileFields } from '@/lib/execution/files'
|
import { processInputFileFields } from '@/lib/execution/files'
|
||||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||||
@@ -700,17 +700,29 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
|
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
|
||||||
let isStreamClosed = false
|
let isStreamClosed = false
|
||||||
|
|
||||||
|
const eventWriter = createExecutionEventWriter(executionId)
|
||||||
|
setExecutionMeta(executionId, {
|
||||||
|
status: 'active',
|
||||||
|
userId: actorUserId,
|
||||||
|
workflowId,
|
||||||
|
}).catch(() => {})
|
||||||
|
|
||||||
const stream = new ReadableStream<Uint8Array>({
|
const stream = new ReadableStream<Uint8Array>({
|
||||||
async start(controller) {
|
async start(controller) {
|
||||||
const sendEvent = (event: ExecutionEvent) => {
|
let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
|
||||||
if (isStreamClosed) return
|
|
||||||
|
|
||||||
|
const sendEvent = (event: ExecutionEvent) => {
|
||||||
|
if (!isStreamClosed) {
|
||||||
try {
|
try {
|
||||||
controller.enqueue(encodeSSEEvent(event))
|
controller.enqueue(encodeSSEEvent(event))
|
||||||
} catch {
|
} catch {
|
||||||
isStreamClosed = true
|
isStreamClosed = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
|
||||||
|
eventWriter.write(event).catch(() => {})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const startTime = new Date()
|
const startTime = new Date()
|
||||||
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
|
|
||||||
const reader = streamingExec.stream.getReader()
|
const reader = streamingExec.stream.getReader()
|
||||||
const decoder = new TextDecoder()
|
const decoder = new TextDecoder()
|
||||||
let chunkCount = 0
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
while (true) {
|
while (true) {
|
||||||
const { done, value } = await reader.read()
|
const { done, value } = await reader.read()
|
||||||
if (done) break
|
if (done) break
|
||||||
|
|
||||||
chunkCount++
|
|
||||||
const chunk = decoder.decode(value, { stream: true })
|
const chunk = decoder.decode(value, { stream: true })
|
||||||
sendEvent({
|
sendEvent({
|
||||||
type: 'stream:chunk',
|
type: 'stream:chunk',
|
||||||
@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
duration: result.metadata?.duration || 0,
|
duration: result.metadata?.duration || 0,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
finalMetaStatus = 'error'
|
||||||
} else {
|
} else {
|
||||||
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
||||||
|
|
||||||
@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
duration: result.metadata?.duration || 0,
|
duration: result.metadata?.duration || 0,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
finalMetaStatus = 'cancelled'
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
finalMetaStatus = 'complete'
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
||||||
const errorMessage = isTimeout
|
const errorMessage = isTimeout
|
||||||
@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
duration: executionResult?.metadata?.duration || 0,
|
duration: executionResult?.metadata?.duration || 0,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
finalMetaStatus = 'error'
|
||||||
} finally {
|
} finally {
|
||||||
|
try {
|
||||||
|
await eventWriter.close()
|
||||||
|
} catch (closeError) {
|
||||||
|
logger.warn(`[${requestId}] Failed to close event writer`, {
|
||||||
|
error: closeError instanceof Error ? closeError.message : String(closeError),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (finalMetaStatus) {
|
||||||
|
setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
|
||||||
|
}
|
||||||
timeoutController.cleanup()
|
timeoutController.cleanup()
|
||||||
if (executionId) {
|
if (executionId) {
|
||||||
await cleanupExecutionBase64Cache(executionId)
|
await cleanupExecutionBase64Cache(executionId)
|
||||||
@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
},
|
},
|
||||||
cancel() {
|
cancel() {
|
||||||
isStreamClosed = true
|
isStreamClosed = true
|
||||||
timeoutController.cleanup()
|
logger.info(`[${requestId}] Client disconnected from SSE stream`)
|
||||||
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
|
|
||||||
timeoutController.abort()
|
|
||||||
markExecutionCancelled(executionId).catch(() => {})
|
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,170 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
|
import {
|
||||||
|
type ExecutionStreamStatus,
|
||||||
|
getExecutionMeta,
|
||||||
|
readExecutionEvents,
|
||||||
|
} from '@/lib/execution/event-buffer'
|
||||||
|
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
|
||||||
|
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('ExecutionStreamReconnectAPI')
|
||||||
|
|
||||||
|
const POLL_INTERVAL_MS = 500
|
||||||
|
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
|
||||||
|
|
||||||
|
function isTerminalStatus(status: ExecutionStreamStatus): boolean {
|
||||||
|
return status === 'complete' || status === 'error' || status === 'cancelled'
|
||||||
|
}
|
||||||
|
|
||||||
|
export const runtime = 'nodejs'
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
export async function GET(
|
||||||
|
req: NextRequest,
|
||||||
|
{ params }: { params: Promise<{ id: string; executionId: string }> }
|
||||||
|
) {
|
||||||
|
const { id: workflowId, executionId } = await params
|
||||||
|
|
||||||
|
try {
|
||||||
|
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
|
||||||
|
if (!auth.success || !auth.userId) {
|
||||||
|
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
|
||||||
|
workflowId,
|
||||||
|
userId: auth.userId,
|
||||||
|
action: 'read',
|
||||||
|
})
|
||||||
|
if (!workflowAuthorization.allowed) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: workflowAuthorization.message || 'Access denied' },
|
||||||
|
{ status: workflowAuthorization.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = await getExecutionMeta(executionId)
|
||||||
|
if (!meta) {
|
||||||
|
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.workflowId && meta.workflowId !== workflowId) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Execution does not belong to this workflow' },
|
||||||
|
{ status: 403 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const fromParam = req.nextUrl.searchParams.get('from')
|
||||||
|
const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
|
||||||
|
const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
|
||||||
|
|
||||||
|
logger.info('Reconnection stream requested', {
|
||||||
|
workflowId,
|
||||||
|
executionId,
|
||||||
|
fromEventId,
|
||||||
|
metaStatus: meta.status,
|
||||||
|
})
|
||||||
|
|
||||||
|
const encoder = new TextEncoder()
|
||||||
|
|
||||||
|
let closed = false
|
||||||
|
|
||||||
|
const stream = new ReadableStream<Uint8Array>({
|
||||||
|
async start(controller) {
|
||||||
|
let lastEventId = fromEventId
|
||||||
|
const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
|
||||||
|
|
||||||
|
const enqueue = (text: string) => {
|
||||||
|
if (closed) return
|
||||||
|
try {
|
||||||
|
controller.enqueue(encoder.encode(text))
|
||||||
|
} catch {
|
||||||
|
closed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const events = await readExecutionEvents(executionId, lastEventId)
|
||||||
|
for (const entry of events) {
|
||||||
|
if (closed) return
|
||||||
|
enqueue(formatSSEEvent(entry.event))
|
||||||
|
lastEventId = entry.eventId
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentMeta = await getExecutionMeta(executionId)
|
||||||
|
if (!currentMeta || isTerminalStatus(currentMeta.status)) {
|
||||||
|
enqueue('data: [DONE]\n\n')
|
||||||
|
if (!closed) controller.close()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
while (!closed && Date.now() < pollDeadline) {
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
|
||||||
|
if (closed) return
|
||||||
|
|
||||||
|
const newEvents = await readExecutionEvents(executionId, lastEventId)
|
||||||
|
for (const entry of newEvents) {
|
||||||
|
if (closed) return
|
||||||
|
enqueue(formatSSEEvent(entry.event))
|
||||||
|
lastEventId = entry.eventId
|
||||||
|
}
|
||||||
|
|
||||||
|
const polledMeta = await getExecutionMeta(executionId)
|
||||||
|
if (!polledMeta || isTerminalStatus(polledMeta.status)) {
|
||||||
|
const finalEvents = await readExecutionEvents(executionId, lastEventId)
|
||||||
|
for (const entry of finalEvents) {
|
||||||
|
if (closed) return
|
||||||
|
enqueue(formatSSEEvent(entry.event))
|
||||||
|
lastEventId = entry.eventId
|
||||||
|
}
|
||||||
|
enqueue('data: [DONE]\n\n')
|
||||||
|
if (!closed) controller.close()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!closed) {
|
||||||
|
logger.warn('Reconnection stream poll deadline reached', { executionId })
|
||||||
|
enqueue('data: [DONE]\n\n')
|
||||||
|
controller.close()
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error in reconnection stream', {
|
||||||
|
executionId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
if (!closed) {
|
||||||
|
try {
|
||||||
|
controller.close()
|
||||||
|
} catch {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cancel() {
|
||||||
|
closed = true
|
||||||
|
logger.info('Client disconnected from reconnection stream', { executionId })
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
return new NextResponse(stream, {
|
||||||
|
headers: {
|
||||||
|
...SSE_HEADERS,
|
||||||
|
'X-Execution-Id': executionId,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error: any) {
|
||||||
|
logger.error('Failed to start reconnection stream', {
|
||||||
|
workflowId,
|
||||||
|
executionId,
|
||||||
|
error: error.message,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: error.message || 'Failed to start reconnection stream' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -14,15 +14,6 @@ const logger = createLogger('DiffControls')
 const NOTIFICATION_WIDTH = 240
 const NOTIFICATION_GAP = 16
 
-function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
-  if (name === 'edit_workflow') return true
-  if (name !== 'workflow_change') return false
-
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
 export const DiffControls = memo(function DiffControls() {
   const isTerminalResizing = useTerminalStore((state) => state.isResizing)
   const isPanelResizing = usePanelStore((state) => state.isResizing)
@@ -73,7 +64,7 @@ export const DiffControls = memo(function DiffControls() {
       const b = blocks[bi]
       if (b?.type === 'tool_call') {
         const tn = b.toolCall?.name
-        if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+        if (tn === 'edit_workflow') {
           id = b.toolCall?.id
           break outer
         }
@@ -81,9 +72,7 @@ export const DiffControls = memo(function DiffControls() {
       }
     }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) =>
-        isWorkflowEditToolCall(t.name, t.params)
-      )
+      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
       id = candidates.length ? candidates[candidates.length - 1].id : undefined
     }
     if (id) updatePreviewToolCallState('accepted', id)
@@ -113,7 +102,7 @@ export const DiffControls = memo(function DiffControls() {
       const b = blocks[bi]
       if (b?.type === 'tool_call') {
         const tn = b.toolCall?.name
-        if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+        if (tn === 'edit_workflow') {
           id = b.toolCall?.id
           break outer
         }
@@ -121,9 +110,7 @@ export const DiffControls = memo(function DiffControls() {
       }
     }
     if (!id) {
-      const candidates = Object.values(toolCallsById).filter((t) =>
-        isWorkflowEditToolCall(t.name, t.params)
-      )
+      const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
       id = candidates.length ? candidates[candidates.length - 1].id : undefined
     }
     if (id) updatePreviewToolCallState('rejected', id)
|
|||||||
@@ -47,28 +47,6 @@ interface ParsedTags {
|
|||||||
cleanContent: string
|
cleanContent: string
|
||||||
}
|
}
|
||||||
|
|
||||||
function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
|
|
||||||
const candidate = ((toolCall as any)?.parameters ||
|
|
||||||
(toolCall as any)?.input ||
|
|
||||||
(toolCall as any)?.params ||
|
|
||||||
{}) as Record<string, unknown>
|
|
||||||
return candidate && typeof candidate === 'object' ? candidate : {}
|
|
||||||
}
|
|
||||||
|
|
||||||
function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
|
|
||||||
if (!toolCall || toolCall.name !== 'workflow_change') return false
|
|
||||||
const params = getToolCallParams(toolCall)
|
|
||||||
const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
|
|
||||||
if (mode === 'apply') return true
|
|
||||||
return typeof params.proposalId === 'string' && params.proposalId.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
|
|
||||||
if (!toolCall) return false
|
|
||||||
if (toolCall.name === 'edit_workflow') return true
|
|
||||||
return isWorkflowChangeApplyMode(toolCall)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
* Extracts plan steps from plan_respond tool calls in subagent blocks.
|
||||||
* @param blocks - The subagent content blocks to search
|
* @param blocks - The subagent content blocks to search
|
||||||
@@ -893,10 +871,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (segment.type === 'tool' && segment.block.toolCall) {
|
if (segment.type === 'tool' && segment.block.toolCall) {
|
||||||
if (
|
if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
|
||||||
(toolCall.name === 'edit' || toolCall.name === 'build') &&
|
|
||||||
isWorkflowEditSummaryTool(segment.block.toolCall)
|
|
||||||
) {
|
|
||||||
return (
|
return (
|
||||||
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
<div key={`tool-${segment.block.toolCall.id || index}`}>
|
||||||
<WorkflowEditSummary toolCall={segment.block.toolCall} />
|
<WorkflowEditSummary toolCall={segment.block.toolCall} />
|
||||||
@@ -993,11 +968,12 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
|||||||
}
|
}
|
||||||
}, [blocks])
|
}, [blocks])
|
||||||
|
|
||||||
if (!isWorkflowEditSummaryTool(toolCall)) {
|
if (toolCall.name !== 'edit_workflow') {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|
||||||
const params = getToolCallParams(toolCall)
|
const params =
|
||||||
|
(toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
|
||||||
let operations = Array.isArray(params.operations) ? params.operations : []
|
let operations = Array.isArray(params.operations) ? params.operations : []
|
||||||
|
|
||||||
if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
|
if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
|
||||||
@@ -1243,6 +1219,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
|||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
/** Checks if a tool is server-side executed (not a client tool) */
|
||||||
|
function isIntegrationTool(toolName: string): boolean {
|
||||||
|
return !TOOL_DISPLAY_REGISTRY[toolName]
|
||||||
|
}
|
||||||
|
|
||||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||||
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
if (!toolCall.name || toolCall.name === 'unknown_tool') {
|
||||||
return false
|
return false
|
||||||
@@ -1252,96 +1233,59 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if (toolCall.ui?.showInterrupt !== true) {
|
// Never show buttons for tools the user has marked as always-allowed
|
||||||
|
if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
|
||||||
|
if (hasInterrupt) {
|
||||||
return true
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Integration tools (user-installed) always require approval
|
||||||
|
if (isIntegrationTool(toolCall.name)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const toolCallLogger = createLogger('CopilotToolCall')
|
const toolCallLogger = createLogger('CopilotToolCall')
|
||||||
|
|
||||||
async function sendToolDecision(
|
async function sendToolDecision(
|
||||||
toolCallId: string,
|
toolCallId: string,
|
||||||
status: 'accepted' | 'rejected' | 'background',
|
status: 'accepted' | 'rejected' | 'background'
|
||||||
options?: {
|
|
||||||
toolName?: string
|
|
||||||
remember?: boolean
|
|
||||||
}
|
|
||||||
) {
|
) {
|
||||||
try {
|
try {
|
||||||
await fetch('/api/copilot/confirm', {
|
await fetch('/api/copilot/confirm', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({ toolCallId, status }),
|
||||||
toolCallId,
|
|
||||||
status,
|
|
||||||
...(options?.toolName ? { toolName: options.toolName } : {}),
|
|
||||||
...(options?.remember ? { remember: true } : {}),
|
|
||||||
}),
|
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
toolCallLogger.warn('Failed to send tool decision', {
|
toolCallLogger.warn('Failed to send tool decision', {
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
remember: options?.remember === true,
|
|
||||||
toolName: options?.toolName,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
error: error instanceof Error ? error.message : String(error),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
|
|
||||||
method: 'DELETE',
|
|
||||||
})
|
|
||||||
return response.ok
|
|
||||||
} catch (error) {
|
|
||||||
toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
|
|
||||||
toolName,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]
|
|
||||||
|
|
||||||
function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
|
|
||||||
const id = action.id.toLowerCase()
|
|
||||||
if (id.includes('background')) return 'background'
|
|
||||||
if (action.kind === 'reject') return 'rejected'
|
|
||||||
return 'accepted'
|
|
||||||
}
|
|
||||||
|
|
||||||
function isClientRunCapability(toolCall: CopilotToolCall): boolean {
|
|
||||||
if (toolCall.execution?.target === 'sim_client_capability') {
|
|
||||||
return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
|
|
||||||
}
|
|
||||||
return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleRun(
|
async function handleRun(
|
||||||
toolCall: CopilotToolCall,
|
toolCall: CopilotToolCall,
|
||||||
setToolCallState: any,
|
setToolCallState: any,
|
||||||
onStateChange?: any,
|
onStateChange?: any,
|
||||||
editedParams?: any,
|
editedParams?: any
|
||||||
options?: {
|
|
||||||
remember?: boolean
|
|
||||||
}
|
|
||||||
) {
|
) {
|
||||||
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
||||||
onStateChange?.('executing')
|
onStateChange?.('executing')
|
||||||
await sendToolDecision(toolCall.id, 'accepted', {
|
await sendToolDecision(toolCall.id, 'accepted')
|
||||||
toolName: toolCall.name,
|
|
||||||
remember: options?.remember === true,
|
|
||||||
})
|
|
||||||
|
|
||||||
// Client-executable run tools: execute on the client for real-time feedback
|
// Client-executable run tools: execute on the client for real-time feedback
|
||||||
// (block pulsing, console logs, stop button). The server defers execution
|
// (block pulsing, console logs, stop button). The server defers execution
|
||||||
// for these tools; the client reports back via mark-complete.
|
// for these tools; the client reports back via mark-complete.
|
||||||
if (isClientRunCapability(toolCall)) {
|
if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
|
||||||
const params = editedParams || toolCall.params || {}
|
const params = editedParams || toolCall.params || {}
|
||||||
executeRunToolOnClient(toolCall.id, toolCall.name, params)
|
executeRunToolOnClient(toolCall.id, toolCall.name, params)
|
||||||
}
|
}
|
||||||
@@ -1354,9 +1298,6 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getDisplayName(toolCall: CopilotToolCall): string {
|
function getDisplayName(toolCall: CopilotToolCall): string {
|
||||||
if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
|
|
||||||
if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`
|
|
||||||
|
|
||||||
const fromStore = (toolCall as any).display?.text
|
const fromStore = (toolCall as any).display?.text
|
||||||
if (fromStore) return fromStore
|
if (fromStore) return fromStore
|
||||||
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||||
@@ -1401,37 +1342,53 @@ function RunSkipButtons({
|
|||||||
toolCall,
|
toolCall,
|
||||||
onStateChange,
|
onStateChange,
|
||||||
editedParams,
|
editedParams,
|
||||||
actions,
|
|
||||||
}: {
|
}: {
|
||||||
toolCall: CopilotToolCall
|
toolCall: CopilotToolCall
|
||||||
onStateChange?: (state: any) => void
|
onStateChange?: (state: any) => void
|
||||||
editedParams?: any
|
editedParams?: any
|
||||||
actions: ToolUiAction[]
|
|
||||||
}) {
|
}) {
|
||||||
const [isProcessing, setIsProcessing] = useState(false)
|
const [isProcessing, setIsProcessing] = useState(false)
|
||||||
const [buttonsHidden, setButtonsHidden] = useState(false)
|
const [buttonsHidden, setButtonsHidden] = useState(false)
|
||||||
const actionInProgressRef = useRef(false)
|
const actionInProgressRef = useRef(false)
|
||||||
const { setToolCallState } = useCopilotStore()
|
const { setToolCallState, addAutoAllowedTool } = useCopilotStore()
|
||||||
|
|
||||||
const onAction = async (action: ToolUiAction) => {
|
const onRun = async () => {
|
||||||
// Prevent race condition - check ref synchronously
|
// Prevent race condition - check ref synchronously
|
||||||
if (actionInProgressRef.current) return
|
if (actionInProgressRef.current) return
|
||||||
actionInProgressRef.current = true
|
actionInProgressRef.current = true
|
||||||
setIsProcessing(true)
|
setIsProcessing(true)
|
||||||
setButtonsHidden(true)
|
setButtonsHidden(true)
|
||||||
try {
|
try {
|
||||||
const decision = actionDecision(action)
|
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||||
if (decision === 'accepted') {
|
} finally {
|
||||||
await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
|
setIsProcessing(false)
|
||||||
remember: action.remember === true,
|
actionInProgressRef.current = false
|
||||||
})
|
|
||||||
} else if (decision === 'rejected') {
|
|
||||||
await handleSkip(toolCall, setToolCallState, onStateChange)
|
|
||||||
} else {
|
|
||||||
setToolCallState(toolCall, ClientToolCallState.background)
|
|
||||||
onStateChange?.('background')
|
|
||||||
await sendToolDecision(toolCall.id, 'background')
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const onAlwaysAllow = async () => {
|
||||||
|
// Prevent race condition - check ref synchronously
|
||||||
|
if (actionInProgressRef.current) return
|
||||||
|
actionInProgressRef.current = true
|
||||||
|
setIsProcessing(true)
|
||||||
|
setButtonsHidden(true)
|
||||||
|
try {
|
||||||
|
await addAutoAllowedTool(toolCall.name)
|
||||||
|
await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
|
||||||
|
} finally {
|
||||||
|
setIsProcessing(false)
|
||||||
|
actionInProgressRef.current = false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const onSkip = async () => {
|
||||||
|
// Prevent race condition - check ref synchronously
|
||||||
|
if (actionInProgressRef.current) return
|
||||||
|
actionInProgressRef.current = true
|
||||||
|
setIsProcessing(true)
|
||||||
|
setButtonsHidden(true)
|
||||||
|
try {
|
||||||
|
await handleSkip(toolCall, setToolCallState, onStateChange)
|
||||||
} finally {
|
} finally {
|
||||||
setIsProcessing(false)
|
setIsProcessing(false)
|
||||||
actionInProgressRef.current = false
|
actionInProgressRef.current = false
|
||||||
@@ -1440,22 +1397,23 @@ function RunSkipButtons({
|
|||||||
|
|
||||||
if (buttonsHidden) return null
|
if (buttonsHidden) return null
|
||||||
|
|
||||||
|
// Show "Always Allow" for all tools that require confirmation
|
||||||
|
const showAlwaysAllow = true
|
||||||
|
|
||||||
|
// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
|
||||||
return (
|
return (
|
||||||
<div className='mt-[10px] flex gap-[6px]'>
|
<div className='mt-[10px] flex gap-[6px]'>
|
||||||
{actions.map((action, index) => {
|
<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
|
||||||
const variant =
|
{isProcessing ? 'Allowing...' : 'Allow'}
|
||||||
action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
|
</Button>
|
||||||
return (
|
{showAlwaysAllow && (
|
||||||
<Button
|
<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
|
||||||
key={action.id}
|
{isProcessing ? 'Allowing...' : 'Always Allow'}
|
||||||
onClick={() => onAction(action)}
|
</Button>
|
||||||
disabled={isProcessing}
|
)}
|
||||||
variant={variant}
|
<Button onClick={onSkip} disabled={isProcessing} variant='default'>
|
||||||
>
|
Skip
|
||||||
{isProcessing && index === 0 ? 'Working...' : action.label}
|
|
||||||
</Button>
|
</Button>
|
||||||
)
|
|
||||||
})}
|
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -1472,16 +1430,10 @@ export function ToolCall({
|
|||||||
const liveToolCall = useCopilotStore((s) =>
|
const liveToolCall = useCopilotStore((s) =>
|
||||||
effectiveId ? s.toolCallsById[effectiveId] : undefined
|
effectiveId ? s.toolCallsById[effectiveId] : undefined
|
||||||
)
|
)
|
||||||
const rawToolCall = liveToolCall || toolCallProp
|
const toolCall = liveToolCall || toolCallProp
|
||||||
const hasRealToolCall = !!rawToolCall
|
|
||||||
const toolCall: CopilotToolCall =
|
// Guard: nothing to render without a toolCall
|
||||||
rawToolCall ||
|
if (!toolCall) return null
|
||||||
({
|
|
||||||
id: effectiveId || '',
|
|
||||||
name: '',
|
|
||||||
state: ClientToolCallState.generating,
|
|
||||||
params: {},
|
|
||||||
} as CopilotToolCall)
|
|
||||||
|
|
||||||
const isExpandablePending =
|
const isExpandablePending =
|
||||||
toolCall?.state === 'pending' &&
|
toolCall?.state === 'pending' &&
|
||||||
@@ -1489,15 +1441,17 @@ export function ToolCall({
|
|||||||
|
|
||||||
const [expanded, setExpanded] = useState(isExpandablePending)
|
const [expanded, setExpanded] = useState(isExpandablePending)
|
||||||
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
|
||||||
const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)
|
|
||||||
|
|
||||||
// State for editable parameters
|
// State for editable parameters
|
||||||
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
|
||||||
const [editedParams, setEditedParams] = useState(params)
|
const [editedParams, setEditedParams] = useState(params)
|
||||||
const paramsRef = useRef(params)
|
const paramsRef = useRef(params)
|
||||||
|
|
||||||
const { setToolCallState } = useCopilotStore()
|
// Check if this integration tool is auto-allowed
|
||||||
const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall
|
const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
|
||||||
|
const isAutoAllowed = useCopilotStore(
|
||||||
|
(s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
|
||||||
|
)
|
||||||
|
|
||||||
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -1507,14 +1461,6 @@ export function ToolCall({
|
|||||||
}
|
}
|
||||||
}, [params])
|
}, [params])
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
setAutoAllowRemovedForCall(false)
|
|
||||||
setShowRemoveAutoAllow(false)
|
|
||||||
}, [toolCall.id])
|
|
||||||
|
|
||||||
// Guard: nothing to render without a toolCall
|
|
||||||
if (!hasRealToolCall) return null
|
|
||||||
|
|
||||||
// Skip rendering some internal tools
|
// Skip rendering some internal tools
|
||||||
if (
|
if (
|
||||||
toolCall.name === 'checkoff_todo' ||
|
toolCall.name === 'checkoff_todo' ||
|
||||||
@@ -1526,9 +1472,7 @@ export function ToolCall({
|
|||||||
return null
|
return null
|
||||||
|
|
||||||
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
||||||
const isSubagentTool =
|
const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||||
toolCall.execution?.target === 'go_subagent' ||
|
|
||||||
TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
|
||||||
|
|
||||||
// For ALL subagent tools, don't show anything until we have blocks with content
|
// For ALL subagent tools, don't show anything until we have blocks with content
|
||||||
if (isSubagentTool) {
|
if (isSubagentTool) {
|
||||||
@@ -1555,6 +1499,28 @@ export function ToolCall({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Get current mode from store to determine if we should render integration tools
|
||||||
|
const mode = useCopilotStore.getState().mode
|
||||||
|
|
||||||
|
// Check if this is a completed/historical tool call (not pending/executing)
|
||||||
|
// Use string comparison to handle both enum values and string values from DB
|
||||||
|
const stateStr = String(toolCall.state)
|
||||||
|
const isCompletedToolCall =
|
||||||
|
stateStr === 'success' ||
|
||||||
|
stateStr === 'error' ||
|
||||||
|
stateStr === 'rejected' ||
|
||||||
|
stateStr === 'aborted'
|
||||||
|
|
||||||
|
// Allow rendering if:
|
||||||
|
// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
|
||||||
|
// 2. We're in build mode (integration tools are executed server-side), OR
|
||||||
|
// 3. Tool call is already completed (historical - should always render)
|
||||||
|
const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||||
|
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
||||||
|
|
||||||
|
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
||||||
// Check if tool has params table config (meaning it's expandable)
|
// Check if tool has params table config (meaning it's expandable)
|
||||||
const hasParamsTable = !!toolUIConfig?.paramsTable
|
const hasParamsTable = !!toolUIConfig?.paramsTable
|
||||||
@@ -1564,14 +1530,6 @@ export function ToolCall({
|
|||||||
toolCall.name === 'make_api_request' ||
|
toolCall.name === 'make_api_request' ||
|
||||||
toolCall.name === 'set_global_workflow_variables'
|
toolCall.name === 'set_global_workflow_variables'
|
||||||
|
|
||||||
const interruptActions =
|
|
||||||
(toolCall.ui?.actions && toolCall.ui.actions.length > 0
|
|
||||||
? toolCall.ui.actions
|
|
||||||
: [
|
|
||||||
{ id: 'allow_once', label: 'Allow', kind: 'accept' as const },
|
|
||||||
{ id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
|
|
||||||
{ id: 'reject', label: 'Skip', kind: 'reject' as const },
|
|
||||||
]) as ToolUiAction[]
|
|
||||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||||
|
|
||||||
// Check UI config for secondary action - only show for current message tool calls
|
// Check UI config for secondary action - only show for current message tool calls
|
||||||
@@ -2029,12 +1987,9 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
await removeAutoAllowedTool(toolCall.name)
|
||||||
if (removed) {
|
|
||||||
setAutoAllowRemovedForCall(true)
|
|
||||||
setShowRemoveAutoAllow(false)
|
setShowRemoveAutoAllow(false)
|
||||||
forceUpdate({})
|
forceUpdate({})
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2048,7 +2003,6 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
actions={interruptActions}
|
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
@@ -2094,12 +2048,9 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
await removeAutoAllowedTool(toolCall.name)
|
||||||
if (removed) {
|
|
||||||
setAutoAllowRemovedForCall(true)
|
|
||||||
setShowRemoveAutoAllow(false)
|
setShowRemoveAutoAllow(false)
|
||||||
forceUpdate({})
|
forceUpdate({})
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2113,7 +2064,6 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
actions={interruptActions}
|
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
@@ -2137,7 +2087,7 @@ export function ToolCall({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
|
const isEditWorkflow = toolCall.name === 'edit_workflow'
|
||||||
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
|
||||||
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
|
||||||
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
const hideTextForEditWorkflow = isEditWorkflow && hasOperations
|
||||||
@@ -2159,12 +2109,9 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
const removed = await removeAutoAllowedToolPreference(toolCall.name)
|
await removeAutoAllowedTool(toolCall.name)
|
||||||
if (removed) {
|
|
||||||
setAutoAllowRemovedForCall(true)
|
|
||||||
setShowRemoveAutoAllow(false)
|
setShowRemoveAutoAllow(false)
|
||||||
forceUpdate({})
|
forceUpdate({})
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
variant='default'
|
variant='default'
|
||||||
className='text-xs'
|
className='text-xs'
|
||||||
@@ -2178,7 +2125,6 @@ export function ToolCall({
|
|||||||
toolCall={toolCall}
|
toolCall={toolCall}
|
||||||
onStateChange={handleStateChange}
|
onStateChange={handleStateChange}
|
||||||
editedParams={editedParams}
|
editedParams={editedParams}
|
||||||
actions={interruptActions}
|
|
||||||
/>
|
/>
|
||||||
) : showMoveToBackground ? (
|
) : showMoveToBackground ? (
|
||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
@@ -2209,7 +2155,7 @@ export function ToolCall({
|
|||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
) : null}
|
) : null}
|
||||||
{/* Workflow edit summary - shows block changes after edit_workflow/workflow_change(apply) */}
|
{/* Workflow edit summary - shows block changes after edit_workflow completes */}
|
||||||
<WorkflowEditSummary toolCall={toolCall} />
|
<WorkflowEditSummary toolCall={toolCall} />
|
||||||
|
|
||||||
{/* Render subagent content as thinking text */}
|
{/* Render subagent content as thinking text */}
|
||||||
|
|||||||
@@ -113,6 +113,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     clearPlanArtifact,
     savePlanArtifact,
     loadAvailableModels,
+    loadAutoAllowedTools,
     resumeActiveStream,
   } = useCopilotStore()
 
@@ -124,6 +125,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
+    loadAutoAllowedTools,
+    currentChat,
     isSendingMessage,
     resumeActiveStream,
   })
@@ -12,6 +12,8 @@ interface UseCopilotInitializationProps
   setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
   loadChats: (forceRefresh?: boolean) => Promise<void>
   loadAvailableModels: () => Promise<void>
+  loadAutoAllowedTools: () => Promise<void>
+  currentChat: any
   isSendingMessage: boolean
   resumeActiveStream: () => Promise<boolean>
 }
@@ -30,6 +32,8 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     setCopilotWorkflowId,
     loadChats,
     loadAvailableModels,
+    loadAutoAllowedTools,
+    currentChat,
     isSendingMessage,
     resumeActiveStream,
   } = props
@@ -116,6 +120,17 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     })
   }, [isSendingMessage, resumeActiveStream])
 
+  /** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
+  const hasLoadedAutoAllowedToolsRef = useRef(false)
+  useEffect(() => {
+    if (!hasLoadedAutoAllowedToolsRef.current) {
+      hasLoadedAutoAllowedToolsRef.current = true
+      loadAutoAllowedTools().catch((err) => {
+        logger.warn('[Copilot] Failed to load auto-allowed tools', err)
+      })
+    }
+  }, [loadAutoAllowedTools])
+
   /** Load available models once on mount */
   const hasLoadedModelsRef = useRef(false)
   useEffect(() => {
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
|
|||||||
className='min-h-[120px] resize-none'
|
className='min-h-[120px] resize-none'
|
||||||
value={description}
|
value={description}
|
||||||
onChange={(e) => setDescription(e.target.value)}
|
onChange={(e) => setDescription(e.target.value)}
|
||||||
maxLength={500}
|
maxLength={2000}
|
||||||
disabled={isGenerating}
|
disabled={isGenerating}
|
||||||
/>
|
/>
|
||||||
<div className='flex items-center justify-between'>
|
<div className='flex items-center justify-between'>
|
||||||
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
|
|||||||
</p>
|
</p>
|
||||||
)}
|
)}
|
||||||
{!updateMutation.error && !generateMutation.error && <div />}
|
{!updateMutation.error && !generateMutation.error && <div />}
|
||||||
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
|
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
|
||||||
</div>
|
</div>
|
||||||
</ModalBody>
|
</ModalBody>
|
||||||
<ModalFooter>
|
<ModalFooter>
|
||||||
|
|||||||
@@ -57,6 +57,21 @@ export function useChangeDetection({
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (block.triggerMode) {
|
||||||
|
const triggerConfigValue = blockSubValues?.triggerConfig
|
||||||
|
if (
|
||||||
|
triggerConfigValue &&
|
||||||
|
typeof triggerConfigValue === 'object' &&
|
||||||
|
!subBlocks.triggerConfig
|
||||||
|
) {
|
||||||
|
subBlocks.triggerConfig = {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: triggerConfigValue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
blocksWithSubBlocks[blockId] = {
|
blocksWithSubBlocks[blockId] = {
|
||||||
...block,
|
...block,
|
||||||
subBlocks,
|
subBlocks,
|
||||||
|
|||||||
@@ -0,0 +1,189 @@
|
|||||||
|
'use client'
|
||||||
|
|
||||||
|
import type React from 'react'
|
||||||
|
import { useRef, useState } from 'react'
|
||||||
|
import { ArrowLeftRight, ArrowUp } from 'lucide-react'
|
||||||
|
import { Button, Input, Label, Tooltip } from '@/components/emcn'
|
||||||
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
|
import type { WandControlHandlers } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Props for a generic parameter with label component
|
||||||
|
*/
|
||||||
|
export interface ParameterWithLabelProps {
|
||||||
|
paramId: string
|
||||||
|
title: string
|
||||||
|
isRequired: boolean
|
||||||
|
visibility: string
|
||||||
|
wandConfig?: {
|
||||||
|
enabled: boolean
|
||||||
|
prompt?: string
|
||||||
|
placeholder?: string
|
||||||
|
}
|
||||||
|
canonicalToggle?: {
|
||||||
|
mode: 'basic' | 'advanced'
|
||||||
|
disabled?: boolean
|
||||||
|
onToggle?: () => void
|
||||||
|
}
|
||||||
|
disabled: boolean
|
||||||
|
isPreview: boolean
|
||||||
|
children: (wandControlRef: React.MutableRefObject<WandControlHandlers | null>) => React.ReactNode
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generic wrapper component for parameters that manages wand state and renders label + input
|
||||||
|
*/
|
||||||
|
export function ParameterWithLabel({
|
||||||
|
paramId,
|
||||||
|
title,
|
||||||
|
isRequired,
|
||||||
|
visibility,
|
||||||
|
wandConfig,
|
||||||
|
canonicalToggle,
|
||||||
|
disabled,
|
||||||
|
isPreview,
|
||||||
|
children,
|
||||||
|
}: ParameterWithLabelProps) {
|
||||||
|
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||||
|
const [searchQuery, setSearchQuery] = useState('')
|
||||||
|
const searchInputRef = useRef<HTMLInputElement>(null)
|
||||||
|
const wandControlRef = useRef<WandControlHandlers | null>(null)
|
||||||
|
|
||||||
|
const isWandEnabled = wandConfig?.enabled ?? false
|
||||||
|
const showWand = isWandEnabled && !isPreview && !disabled
|
||||||
|
|
||||||
|
const handleSearchClick = (): void => {
|
||||||
|
setIsSearchActive(true)
|
||||||
|
setTimeout(() => {
|
||||||
|
searchInputRef.current?.focus()
|
||||||
|
}, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleSearchBlur = (): void => {
|
||||||
|
if (!searchQuery.trim() && !wandControlRef.current?.isWandStreaming) {
|
||||||
|
setIsSearchActive(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleSearchChange = (value: string): void => {
|
||||||
|
setSearchQuery(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleSearchSubmit = (): void => {
|
||||||
|
if (searchQuery.trim() && wandControlRef.current) {
|
||||||
|
wandControlRef.current.onWandTrigger(searchQuery)
|
||||||
|
setSearchQuery('')
|
||||||
|
setIsSearchActive(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleSearchCancel = (): void => {
|
||||||
|
setSearchQuery('')
|
||||||
|
setIsSearchActive(false)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isStreaming = wandControlRef.current?.isWandStreaming ?? false
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div key={paramId} className='relative min-w-0 space-y-[6px]'>
|
||||||
|
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
|
||||||
|
<Label className='flex items-center gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-primary)]'>
|
||||||
|
{title}
|
||||||
|
{isRequired && visibility === 'user-only' && <span className='ml-0.5'>*</span>}
|
||||||
|
{visibility !== 'user-only' && (
|
||||||
|
<span className='ml-[6px] text-[12px] text-[var(--text-tertiary)]'>(optional)</span>
|
||||||
|
)}
|
||||||
|
</Label>
|
||||||
|
<div className='flex min-w-0 flex-1 items-center justify-end gap-[6px]'>
|
||||||
|
{showWand &&
|
||||||
|
(!isSearchActive ? (
|
||||||
|
<Button
|
||||||
|
variant='active'
|
||||||
|
className='-my-1 h-5 px-2 py-0 text-[11px]'
|
||||||
|
onClick={handleSearchClick}
|
||||||
|
>
|
||||||
|
Generate
|
||||||
|
</Button>
|
||||||
|
) : (
|
||||||
|
<div className='-my-1 flex min-w-[120px] max-w-[280px] flex-1 items-center gap-[4px]'>
|
||||||
|
<Input
|
||||||
|
ref={searchInputRef}
|
||||||
|
value={isStreaming ? 'Generating...' : searchQuery}
|
||||||
|
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
|
||||||
|
handleSearchChange(e.target.value)
|
||||||
|
}
|
||||||
|
onBlur={(e: React.FocusEvent<HTMLInputElement>) => {
|
||||||
|
const relatedTarget = e.relatedTarget as HTMLElement | null
|
||||||
|
if (relatedTarget?.closest('button')) return
|
||||||
|
handleSearchBlur()
|
||||||
|
}}
|
||||||
|
onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) => {
|
||||||
|
if (e.key === 'Enter' && searchQuery.trim() && !isStreaming) {
|
||||||
|
handleSearchSubmit()
|
||||||
|
} else if (e.key === 'Escape') {
|
||||||
|
handleSearchCancel()
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
disabled={isStreaming}
|
||||||
|
className={cn(
|
||||||
|
'h-5 min-w-[80px] flex-1 text-[11px]',
|
||||||
|
isStreaming && 'text-muted-foreground'
|
||||||
|
)}
|
||||||
|
placeholder='Generate with AI...'
|
||||||
|
/>
|
||||||
|
<Button
|
||||||
|
variant='tertiary'
|
||||||
|
disabled={!searchQuery.trim() || isStreaming}
|
||||||
|
onMouseDown={(e: React.MouseEvent) => {
|
||||||
|
e.preventDefault()
|
||||||
|
e.stopPropagation()
|
||||||
|
}}
|
||||||
|
onClick={(e: React.MouseEvent) => {
|
||||||
|
e.stopPropagation()
|
||||||
|
handleSearchSubmit()
|
||||||
|
}}
|
||||||
|
className='h-[20px] w-[20px] flex-shrink-0 p-0'
|
||||||
|
>
|
||||||
|
<ArrowUp className='h-[12px] w-[12px]' />
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
{canonicalToggle && !isPreview && (
|
||||||
|
<Tooltip.Root>
|
||||||
|
<Tooltip.Trigger asChild>
|
||||||
|
<button
|
||||||
|
type='button'
|
||||||
|
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
|
||||||
|
onClick={canonicalToggle.onToggle}
|
||||||
|
disabled={canonicalToggle.disabled || disabled}
|
||||||
|
aria-label={
|
||||||
|
canonicalToggle.mode === 'advanced'
|
||||||
|
? 'Switch to selector'
|
||||||
|
: 'Switch to manual ID'
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<ArrowLeftRight
|
||||||
|
className={cn(
|
||||||
|
'!h-[12px] !w-[12px]',
|
||||||
|
canonicalToggle.mode === 'advanced'
|
||||||
|
? 'text-[var(--text-primary)]'
|
||||||
|
: 'text-[var(--text-secondary)]'
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
</button>
|
||||||
|
</Tooltip.Trigger>
|
||||||
|
<Tooltip.Content side='top'>
|
||||||
|
<p>
|
||||||
|
{canonicalToggle.mode === 'advanced'
|
||||||
|
? 'Switch to selector'
|
||||||
|
: 'Switch to manual ID'}
|
||||||
|
</p>
|
||||||
|
</Tooltip.Content>
|
||||||
|
</Tooltip.Root>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div className='relative w-full min-w-0'>{children(wandControlRef)}</div>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
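Note: ParameterWithLabel exposes the wand handlers to its child through a render prop, so the child input can register onWandTrigger/isWandStreaming on the shared ref. A hypothetical usage sketch (the ChannelInput component and the ids below are placeholders, not part of this change):

<ParameterWithLabel
  paramId='channel'
  title='Channel'
  isRequired={true}
  visibility='user-only'
  wandConfig={{ enabled: true, placeholder: 'Generate with AI...' }}
  disabled={false}
  isPreview={false}
>
  {(wandControlRef) => (
    // Placeholder child: a real input would assign its wand handlers to wandControlRef.current
    <ChannelInput wandControlRef={wandControlRef} />
  )}
</ParameterWithLabel>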
@@ -0,0 +1,109 @@
|
|||||||
|
'use client'
|
||||||
|
|
||||||
|
import { useEffect, useMemo, useRef } from 'react'
|
||||||
|
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||||
|
import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block'
|
||||||
|
import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
|
||||||
|
|
||||||
|
interface ToolSubBlockRendererProps {
|
||||||
|
blockId: string
|
||||||
|
subBlockId: string
|
||||||
|
toolIndex: number
|
||||||
|
subBlock: BlockSubBlockConfig
|
||||||
|
effectiveParamId: string
|
||||||
|
toolParams: Record<string, string> | undefined
|
||||||
|
onParamChange: (toolIndex: number, paramId: string, value: string) => void
|
||||||
|
disabled: boolean
|
||||||
|
canonicalToggle?: {
|
||||||
|
mode: 'basic' | 'advanced'
|
||||||
|
disabled?: boolean
|
||||||
|
onToggle?: () => void
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bridges the subblock store with StoredTool.params via a synthetic store key,
|
||||||
|
* then delegates all rendering to SubBlock for full parity.
|
||||||
|
*
|
||||||
|
* Two effects handle bidirectional sync:
|
||||||
|
* - tool.params → store (external changes)
|
||||||
|
* - store → tool.params (user interaction)
|
||||||
|
*/
|
||||||
|
export function ToolSubBlockRenderer({
|
||||||
|
blockId,
|
||||||
|
subBlockId,
|
||||||
|
toolIndex,
|
||||||
|
subBlock,
|
||||||
|
effectiveParamId,
|
||||||
|
toolParams,
|
||||||
|
onParamChange,
|
||||||
|
disabled,
|
||||||
|
canonicalToggle,
|
||||||
|
}: ToolSubBlockRendererProps) {
|
||||||
|
const syntheticId = `${subBlockId}-tool-${toolIndex}-${effectiveParamId}`
|
||||||
|
const [storeValue, setStoreValue] = useSubBlockValue(blockId, syntheticId)
|
||||||
|
|
||||||
|
const toolParamValue = toolParams?.[effectiveParamId] ?? ''
|
||||||
|
|
||||||
|
/** Tracks the last value we pushed to the store from tool.params to avoid echo loops */
|
||||||
|
const lastPushedToStoreRef = useRef<string | null>(null)
|
||||||
|
/** Tracks the last value we synced back to tool.params from the store */
|
||||||
|
const lastPushedToParamsRef = useRef<string | null>(null)
|
||||||
|
|
||||||
|
// Sync tool.params → store: push when the prop value changes (including first mount)
|
||||||
|
useEffect(() => {
|
||||||
|
if (!toolParamValue && lastPushedToStoreRef.current === null) {
|
||||||
|
// Skip initializing the store with an empty value on first mount —
|
||||||
|
// let the SubBlock component use its own default.
|
||||||
|
lastPushedToStoreRef.current = toolParamValue
|
||||||
|
lastPushedToParamsRef.current = toolParamValue
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (toolParamValue !== lastPushedToStoreRef.current) {
|
||||||
|
lastPushedToStoreRef.current = toolParamValue
|
||||||
|
lastPushedToParamsRef.current = toolParamValue
|
||||||
|
setStoreValue(toolParamValue)
|
||||||
|
}
|
||||||
|
}, [toolParamValue, setStoreValue])
|
||||||
|
|
||||||
|
// Sync store → tool.params: push when the user changes the value via SubBlock
|
||||||
|
useEffect(() => {
|
||||||
|
if (storeValue == null) return
|
||||||
|
const stringValue = typeof storeValue === 'string' ? storeValue : JSON.stringify(storeValue)
|
||||||
|
if (stringValue !== lastPushedToParamsRef.current) {
|
||||||
|
lastPushedToParamsRef.current = stringValue
|
||||||
|
lastPushedToStoreRef.current = stringValue
|
||||||
|
onParamChange(toolIndex, effectiveParamId, stringValue)
|
||||||
|
}
|
||||||
|
}, [storeValue, toolIndex, effectiveParamId, onParamChange])
|
||||||
|
|
||||||
|
// Determine if the parameter is optional for the user (LLM can fill it)
|
||||||
|
const visibility = subBlock.paramVisibility ?? 'user-or-llm'
|
||||||
|
const isOptionalForUser = visibility !== 'user-only'
|
||||||
|
|
||||||
|
const labelSuffix = useMemo(
|
||||||
|
() =>
|
||||||
|
isOptionalForUser ? (
|
||||||
|
<span className='ml-[6px] text-[12px] text-[var(--text-tertiary)]'>(optional)</span>
|
||||||
|
) : null,
|
||||||
|
[isOptionalForUser]
|
||||||
|
)
|
||||||
|
|
||||||
|
// Suppress SubBlock's "*" required indicator for optional-for-user params
|
||||||
|
const config = {
|
||||||
|
...subBlock,
|
||||||
|
id: syntheticId,
|
||||||
|
...(isOptionalForUser && { required: false }),
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<SubBlock
|
||||||
|
blockId={blockId}
|
||||||
|
config={config}
|
||||||
|
isPreview={false}
|
||||||
|
disabled={disabled}
|
||||||
|
canonicalToggle={canonicalToggle}
|
||||||
|
labelSuffix={labelSuffix}
|
||||||
|
/>
|
||||||
|
)
|
||||||
|
}
|
||||||
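Note: the two sync effects above avoid echo loops by remembering the last value each side wrote. A minimal standalone sketch of the same pattern, with illustrative names (this is not the actual hook added in this change):

import { useEffect, useRef } from 'react'

/** Keeps an external prop value and a store value in sync without ping-ponging updates. */
export function useTwoWaySync(
  externalValue: string,
  storeValue: string,
  setStoreValue: (value: string) => void,
  setExternalValue: (value: string) => void
) {
  const lastPushedToStore = useRef<string | null>(null)
  const lastPushedToExternal = useRef<string | null>(null)

  // external → store: only push values we did not just receive from the store
  useEffect(() => {
    if (externalValue !== lastPushedToStore.current) {
      lastPushedToStore.current = externalValue
      lastPushedToExternal.current = externalValue
      setStoreValue(externalValue)
    }
  }, [externalValue, setStoreValue])

  // store → external: only push values we did not just receive from the prop
  useEffect(() => {
    if (storeValue !== lastPushedToExternal.current) {
      lastPushedToExternal.current = storeValue
      lastPushedToStore.current = storeValue
      setExternalValue(storeValue)
    }
  }, [storeValue, setExternalValue])
}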
@@ -2,37 +2,12 @@
|
|||||||
* @vitest-environment node
|
* @vitest-environment node
|
||||||
*/
|
*/
|
||||||
import { describe, expect, it } from 'vitest'
|
import { describe, expect, it } from 'vitest'
|
||||||
|
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
|
||||||
interface StoredTool {
|
import {
|
||||||
type: string
|
isCustomToolAlreadySelected,
|
||||||
title?: string
|
isMcpToolAlreadySelected,
|
||||||
toolId?: string
|
isWorkflowAlreadySelected,
|
||||||
params?: Record<string, string>
|
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/utils'
|
||||||
customToolId?: string
|
|
||||||
schema?: any
|
|
||||||
code?: string
|
|
||||||
operation?: string
|
|
||||||
usageControl?: 'auto' | 'force' | 'none'
|
|
||||||
}
|
|
||||||
|
|
||||||
const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
|
|
||||||
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
|
|
||||||
}
|
|
||||||
|
|
||||||
const isCustomToolAlreadySelected = (
|
|
||||||
selectedTools: StoredTool[],
|
|
||||||
customToolId: string
|
|
||||||
): boolean => {
|
|
||||||
return selectedTools.some(
|
|
||||||
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
|
|
||||||
return selectedTools.some(
|
|
||||||
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('isMcpToolAlreadySelected', () => {
|
describe('isMcpToolAlreadySelected', () => {
|
||||||
describe('basic functionality', () => {
|
describe('basic functionality', () => {
|
||||||
|
|||||||
File diff suppressed because it is too large
@@ -0,0 +1,31 @@
|
|||||||
|
/**
|
||||||
|
* Represents a tool selected and configured in the workflow
|
||||||
|
*
|
||||||
|
* @remarks
|
||||||
|
* For custom tools (new format), we only store: type, customToolId, usageControl, isExpanded.
|
||||||
|
* Everything else (title, schema, code) is loaded dynamically from the database.
|
||||||
|
* Legacy custom tools with inline schema/code are still supported for backwards compatibility.
|
||||||
|
*/
|
||||||
|
export interface StoredTool {
|
||||||
|
/** Block type identifier */
|
||||||
|
type: string
|
||||||
|
/** Display title for the tool (optional for new custom tool format) */
|
||||||
|
title?: string
|
||||||
|
/** Direct tool ID for execution (optional for new custom tool format) */
|
||||||
|
toolId?: string
|
||||||
|
/** Parameter values configured by the user (optional for new custom tool format) */
|
||||||
|
params?: Record<string, string>
|
||||||
|
/** Whether the tool details are expanded in UI */
|
||||||
|
isExpanded?: boolean
|
||||||
|
/** Database ID for custom tools (new format - reference only) */
|
||||||
|
customToolId?: string
|
||||||
|
/** Tool schema for custom tools (legacy format - inline JSON schema) */
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
schema?: Record<string, any>
|
||||||
|
/** Implementation code for custom tools (legacy format - inline) */
|
||||||
|
code?: string
|
||||||
|
/** Selected operation for multi-operation tools */
|
||||||
|
operation?: string
|
||||||
|
/** Tool usage control mode for LLM */
|
||||||
|
usageControl?: 'auto' | 'force' | 'none'
|
||||||
|
}
|
||||||
@@ -0,0 +1,32 @@
|
|||||||
|
import type { StoredTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tool-input/types'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if an MCP tool is already selected.
|
||||||
|
*/
|
||||||
|
export function isMcpToolAlreadySelected(selectedTools: StoredTool[], mcpToolId: string): boolean {
|
||||||
|
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if a custom tool is already selected.
|
||||||
|
*/
|
||||||
|
export function isCustomToolAlreadySelected(
|
||||||
|
selectedTools: StoredTool[],
|
||||||
|
customToolId: string
|
||||||
|
): boolean {
|
||||||
|
return selectedTools.some(
|
||||||
|
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if a workflow is already selected.
|
||||||
|
*/
|
||||||
|
export function isWorkflowAlreadySelected(
|
||||||
|
selectedTools: StoredTool[],
|
||||||
|
workflowId: string
|
||||||
|
): boolean {
|
||||||
|
return selectedTools.some(
|
||||||
|
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
|
||||||
|
)
|
||||||
|
}
|
||||||
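Note: a quick illustration of the selection helpers above (the tool ids are made up):

const selectedTools: StoredTool[] = [
  { type: 'mcp', toolId: 'mcp-web-search' },
  { type: 'custom-tool', customToolId: 'ct_123' },
  { type: 'workflow_input', params: { workflowId: 'wf_42' } },
]

isMcpToolAlreadySelected(selectedTools, 'mcp-web-search') // true
isCustomToolAlreadySelected(selectedTools, 'ct_999') // false (different custom tool id)
isWorkflowAlreadySelected(selectedTools, 'wf_42') // true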
@@ -76,6 +76,7 @@ interface SubBlockProps {
|
|||||||
disabled?: boolean
|
disabled?: boolean
|
||||||
onToggle?: () => void
|
onToggle?: () => void
|
||||||
}
|
}
|
||||||
|
labelSuffix?: React.ReactNode
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -202,7 +203,8 @@ const renderLabel = (
|
|||||||
showCopyButton: boolean
|
showCopyButton: boolean
|
||||||
copied: boolean
|
copied: boolean
|
||||||
onCopy: () => void
|
onCopy: () => void
|
||||||
}
|
},
|
||||||
|
labelSuffix?: React.ReactNode
|
||||||
): JSX.Element | null => {
|
): JSX.Element | null => {
|
||||||
if (config.type === 'switch') return null
|
if (config.type === 'switch') return null
|
||||||
if (!config.title) return null
|
if (!config.title) return null
|
||||||
@@ -218,6 +220,7 @@ const renderLabel = (
|
|||||||
<Label className='flex items-center gap-[6px] whitespace-nowrap'>
|
<Label className='flex items-center gap-[6px] whitespace-nowrap'>
|
||||||
{config.title}
|
{config.title}
|
||||||
{required && <span className='ml-0.5'>*</span>}
|
{required && <span className='ml-0.5'>*</span>}
|
||||||
|
{labelSuffix}
|
||||||
{config.type === 'code' &&
|
{config.type === 'code' &&
|
||||||
config.language === 'json' &&
|
config.language === 'json' &&
|
||||||
!isValidJson &&
|
!isValidJson &&
|
||||||
@@ -385,7 +388,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
|
|||||||
prevProps.disabled === nextProps.disabled &&
|
prevProps.disabled === nextProps.disabled &&
|
||||||
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
|
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
|
||||||
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
|
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
|
||||||
canonicalToggleEqual
|
canonicalToggleEqual &&
|
||||||
|
prevProps.labelSuffix === nextProps.labelSuffix
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -415,6 +419,7 @@ function SubBlockComponent({
|
|||||||
fieldDiffStatus,
|
fieldDiffStatus,
|
||||||
allowExpandInPreview,
|
allowExpandInPreview,
|
||||||
canonicalToggle,
|
canonicalToggle,
|
||||||
|
labelSuffix,
|
||||||
}: SubBlockProps): JSX.Element {
|
}: SubBlockProps): JSX.Element {
|
||||||
const [isValidJson, setIsValidJson] = useState(true)
|
const [isValidJson, setIsValidJson] = useState(true)
|
||||||
const [isSearchActive, setIsSearchActive] = useState(false)
|
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||||
@@ -1059,7 +1064,8 @@ function SubBlockComponent({
|
|||||||
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
|
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
|
||||||
copied,
|
copied,
|
||||||
onCopy: handleCopy,
|
onCopy: handleCopy,
|
||||||
}
|
},
|
||||||
|
labelSuffix
|
||||||
)}
|
)}
|
||||||
{renderInput()}
|
{renderInput()}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useCallback, useRef, useState } from 'react'
|
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { useQueryClient } from '@tanstack/react-query'
|
import { useQueryClient } from '@tanstack/react-query'
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
|||||||
|
|
||||||
const logger = createLogger('useWorkflowExecution')
|
const logger = createLogger('useWorkflowExecution')
|
||||||
|
|
||||||
// Debug state validation result
|
/**
|
||||||
|
* Module-level Set tracking which workflows have an active reconnection effect.
|
||||||
|
* Prevents multiple hook instances (from different components) from starting
|
||||||
|
* concurrent reconnection streams for the same workflow during the same mount cycle.
|
||||||
|
*/
|
||||||
|
const activeReconnections = new Set<string>()
|
||||||
|
|
||||||
interface DebugValidationResult {
|
interface DebugValidationResult {
|
||||||
isValid: boolean
|
isValid: boolean
|
||||||
error?: string
|
error?: string
|
||||||
@@ -54,7 +60,7 @@ interface DebugValidationResult {
|
|||||||
|
|
||||||
interface BlockEventHandlerConfig {
|
interface BlockEventHandlerConfig {
|
||||||
workflowId?: string
|
workflowId?: string
|
||||||
executionId?: string
|
executionIdRef: { current: string }
|
||||||
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
|
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
|
||||||
activeBlocksSet: Set<string>
|
activeBlocksSet: Set<string>
|
||||||
accumulatedBlockLogs: BlockLog[]
|
accumulatedBlockLogs: BlockLog[]
|
||||||
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
|
|||||||
const queryClient = useQueryClient()
|
const queryClient = useQueryClient()
|
||||||
const currentWorkflow = useCurrentWorkflow()
|
const currentWorkflow = useCurrentWorkflow()
|
||||||
const { activeWorkflowId, workflows } = useWorkflowRegistry()
|
const { activeWorkflowId, workflows } = useWorkflowRegistry()
|
||||||
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
|
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
|
||||||
useTerminalConsoleStore()
|
useTerminalConsoleStore()
|
||||||
|
const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
|
||||||
const { getAllVariables } = useEnvironmentStore()
|
const { getAllVariables } = useEnvironmentStore()
|
||||||
const { getVariablesByWorkflowId, variables } = useVariablesStore()
|
const { getVariablesByWorkflowId, variables } = useVariablesStore()
|
||||||
const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
|
const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
|
||||||
useCurrentWorkflowExecution()
|
useCurrentWorkflowExecution()
|
||||||
|
const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
|
||||||
|
const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
|
||||||
const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
|
const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
|
||||||
const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
|
const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
|
||||||
const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
|
const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
|
||||||
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
|
|||||||
(config: BlockEventHandlerConfig) => {
|
(config: BlockEventHandlerConfig) => {
|
||||||
const {
|
const {
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId,
|
executionIdRef,
|
||||||
workflowEdges,
|
workflowEdges,
|
||||||
activeBlocksSet,
|
activeBlocksSet,
|
||||||
accumulatedBlockLogs,
|
accumulatedBlockLogs,
|
||||||
@@ -308,6 +317,14 @@ export function useWorkflowExecution() {
|
|||||||
onBlockCompleteCallback,
|
onBlockCompleteCallback,
|
||||||
} = config
|
} = config
|
||||||
|
|
||||||
|
/** Returns true if this execution was cancelled or superseded by another run. */
|
||||||
|
const isStaleExecution = () =>
|
||||||
|
!!(
|
||||||
|
workflowId &&
|
||||||
|
executionIdRef.current &&
|
||||||
|
useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
|
||||||
|
)
|
||||||
|
|
||||||
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
|
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
|
||||||
if (!workflowId) return
|
if (!workflowId) return
|
||||||
if (isActive) {
|
if (isActive) {
|
||||||
@@ -360,7 +377,7 @@ export function useWorkflowExecution() {
|
|||||||
endedAt: data.endedAt,
|
endedAt: data.endedAt,
|
||||||
workflowId,
|
workflowId,
|
||||||
blockId: data.blockId,
|
blockId: data.blockId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
blockName: data.blockName || 'Unknown Block',
|
blockName: data.blockName || 'Unknown Block',
|
||||||
blockType: data.blockType || 'unknown',
|
blockType: data.blockType || 'unknown',
|
||||||
iterationCurrent: data.iterationCurrent,
|
iterationCurrent: data.iterationCurrent,
|
||||||
@@ -383,7 +400,7 @@ export function useWorkflowExecution() {
|
|||||||
endedAt: data.endedAt,
|
endedAt: data.endedAt,
|
||||||
workflowId,
|
workflowId,
|
||||||
blockId: data.blockId,
|
blockId: data.blockId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
blockName: data.blockName || 'Unknown Block',
|
blockName: data.blockName || 'Unknown Block',
|
||||||
blockType: data.blockType || 'unknown',
|
blockType: data.blockType || 'unknown',
|
||||||
iterationCurrent: data.iterationCurrent,
|
iterationCurrent: data.iterationCurrent,
|
||||||
@@ -410,7 +427,7 @@ export function useWorkflowExecution() {
|
|||||||
iterationType: data.iterationType,
|
iterationType: data.iterationType,
|
||||||
iterationContainerId: data.iterationContainerId,
|
iterationContainerId: data.iterationContainerId,
|
||||||
},
|
},
|
||||||
executionId
|
executionIdRef.current
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -432,11 +449,12 @@ export function useWorkflowExecution() {
|
|||||||
iterationType: data.iterationType,
|
iterationType: data.iterationType,
|
||||||
iterationContainerId: data.iterationContainerId,
|
iterationContainerId: data.iterationContainerId,
|
||||||
},
|
},
|
||||||
executionId
|
executionIdRef.current
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const onBlockStarted = (data: BlockStartedData) => {
|
const onBlockStarted = (data: BlockStartedData) => {
|
||||||
|
if (isStaleExecution()) return
|
||||||
updateActiveBlocks(data.blockId, true)
|
updateActiveBlocks(data.blockId, true)
|
||||||
markIncomingEdges(data.blockId)
|
markIncomingEdges(data.blockId)
|
||||||
|
|
||||||
@@ -453,7 +471,7 @@ export function useWorkflowExecution() {
|
|||||||
endedAt: undefined,
|
endedAt: undefined,
|
||||||
workflowId,
|
workflowId,
|
||||||
blockId: data.blockId,
|
blockId: data.blockId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
blockName: data.blockName || 'Unknown Block',
|
blockName: data.blockName || 'Unknown Block',
|
||||||
blockType: data.blockType || 'unknown',
|
blockType: data.blockType || 'unknown',
|
||||||
isRunning: true,
|
isRunning: true,
|
||||||
@@ -465,6 +483,7 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const onBlockCompleted = (data: BlockCompletedData) => {
|
const onBlockCompleted = (data: BlockCompletedData) => {
|
||||||
|
if (isStaleExecution()) return
|
||||||
updateActiveBlocks(data.blockId, false)
|
updateActiveBlocks(data.blockId, false)
|
||||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
|
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
|
||||||
|
|
||||||
@@ -495,6 +514,7 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const onBlockError = (data: BlockErrorData) => {
|
const onBlockError = (data: BlockErrorData) => {
|
||||||
|
if (isStaleExecution()) return
|
||||||
updateActiveBlocks(data.blockId, false)
|
updateActiveBlocks(data.blockId, false)
|
||||||
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
|
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
|
||||||
|
|
||||||
@@ -902,10 +922,6 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
// Update block logs with actual stream completion times
|
// Update block logs with actual stream completion times
|
||||||
if (result.logs && streamCompletionTimes.size > 0) {
|
if (result.logs && streamCompletionTimes.size > 0) {
|
||||||
const streamCompletionEndTime = new Date(
|
|
||||||
Math.max(...Array.from(streamCompletionTimes.values()))
|
|
||||||
).toISOString()
|
|
||||||
|
|
||||||
result.logs.forEach((log: BlockLog) => {
|
result.logs.forEach((log: BlockLog) => {
|
||||||
if (streamCompletionTimes.has(log.blockId)) {
|
if (streamCompletionTimes.has(log.blockId)) {
|
||||||
const completionTime = streamCompletionTimes.get(log.blockId)!
|
const completionTime = streamCompletionTimes.get(log.blockId)!
|
||||||
@@ -987,7 +1003,6 @@ export function useWorkflowExecution() {
|
|||||||
return { success: true, stream }
|
return { success: true, stream }
|
||||||
}
|
}
|
||||||
|
|
||||||
// For manual (non-chat) execution
|
|
||||||
const manualExecutionId = uuidv4()
|
const manualExecutionId = uuidv4()
|
||||||
try {
|
try {
|
||||||
const result = await executeWorkflow(
|
const result = await executeWorkflow(
|
||||||
@@ -1002,29 +1017,10 @@ export function useWorkflowExecution() {
|
|||||||
if (result.metadata.pendingBlocks) {
|
if (result.metadata.pendingBlocks) {
|
||||||
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
|
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
|
||||||
}
|
}
|
||||||
} else if (result && 'success' in result) {
|
|
||||||
setExecutionResult(result)
|
|
||||||
// Reset execution state after successful non-debug execution
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setIsDebugging(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
|
|
||||||
if (isChatExecution) {
|
|
||||||
if (!result.metadata) {
|
|
||||||
result.metadata = { duration: 0, startTime: new Date().toISOString() }
|
|
||||||
}
|
|
||||||
;(result.metadata as any).source = 'chat'
|
|
||||||
}
|
|
||||||
|
|
||||||
// Invalidate subscription queries to update usage
|
|
||||||
setTimeout(() => {
|
|
||||||
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
|
||||||
}, 1000)
|
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
|
const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
|
||||||
// Note: Error logs are already persisted server-side via execution-core.ts
|
|
||||||
return errorResult
|
return errorResult
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -1275,7 +1271,7 @@ export function useWorkflowExecution() {
|
|||||||
if (activeWorkflowId) {
|
if (activeWorkflowId) {
|
||||||
logger.info('Using server-side executor')
|
logger.info('Using server-side executor')
|
||||||
|
|
||||||
const executionId = uuidv4()
|
const executionIdRef = { current: '' }
|
||||||
|
|
||||||
let executionResult: ExecutionResult = {
|
let executionResult: ExecutionResult = {
|
||||||
success: false,
|
success: false,
|
||||||
@@ -1293,7 +1289,7 @@ export function useWorkflowExecution() {
|
|||||||
try {
|
try {
|
||||||
const blockHandlers = buildBlockEventHandlers({
|
const blockHandlers = buildBlockEventHandlers({
|
||||||
workflowId: activeWorkflowId,
|
workflowId: activeWorkflowId,
|
||||||
executionId,
|
executionIdRef,
|
||||||
workflowEdges,
|
workflowEdges,
|
||||||
activeBlocksSet,
|
activeBlocksSet,
|
||||||
accumulatedBlockLogs,
|
accumulatedBlockLogs,
|
||||||
@@ -1326,6 +1322,10 @@ export function useWorkflowExecution() {
|
|||||||
loops: clientWorkflowState.loops,
|
loops: clientWorkflowState.loops,
|
||||||
parallels: clientWorkflowState.parallels,
|
parallels: clientWorkflowState.parallels,
|
||||||
},
|
},
|
||||||
|
onExecutionId: (id) => {
|
||||||
|
executionIdRef.current = id
|
||||||
|
setCurrentExecutionId(activeWorkflowId, id)
|
||||||
|
},
|
||||||
callbacks: {
|
callbacks: {
|
||||||
onExecutionStarted: (data) => {
|
onExecutionStarted: (data) => {
|
||||||
logger.info('Server execution started:', data)
|
logger.info('Server execution started:', data)
|
||||||
@@ -1368,6 +1368,18 @@ export function useWorkflowExecution() {
|
|||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCompleted: (data) => {
|
onExecutionCompleted: (data) => {
|
||||||
|
if (
|
||||||
|
activeWorkflowId &&
|
||||||
|
executionIdRef.current &&
|
||||||
|
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||||
|
executionIdRef.current
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setCurrentExecutionId(activeWorkflowId, null)
|
||||||
|
}
|
||||||
|
|
||||||
executionResult = {
|
executionResult = {
|
||||||
success: data.success,
|
success: data.success,
|
||||||
output: data.output,
|
output: data.output,
|
||||||
@@ -1425,9 +1437,33 @@ export function useWorkflowExecution() {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const workflowExecState = activeWorkflowId
|
||||||
|
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
|
||||||
|
: null
|
||||||
|
if (activeWorkflowId && !workflowExecState?.isDebugging) {
|
||||||
|
setExecutionResult(executionResult)
|
||||||
|
setIsExecuting(activeWorkflowId, false)
|
||||||
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
|
setTimeout(() => {
|
||||||
|
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
|
||||||
|
}, 1000)
|
||||||
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionError: (data) => {
|
onExecutionError: (data) => {
|
||||||
|
if (
|
||||||
|
activeWorkflowId &&
|
||||||
|
executionIdRef.current &&
|
||||||
|
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||||
|
executionIdRef.current
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setCurrentExecutionId(activeWorkflowId, null)
|
||||||
|
}
|
||||||
|
|
||||||
executionResult = {
|
executionResult = {
|
||||||
success: false,
|
success: false,
|
||||||
output: {},
|
output: {},
|
||||||
@@ -1441,43 +1477,53 @@ export function useWorkflowExecution() {
|
|||||||
const isPreExecutionError = accumulatedBlockLogs.length === 0
|
const isPreExecutionError = accumulatedBlockLogs.length === 0
|
||||||
handleExecutionErrorConsole({
|
handleExecutionErrorConsole({
|
||||||
workflowId: activeWorkflowId,
|
workflowId: activeWorkflowId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
error: data.error,
|
error: data.error,
|
||||||
durationMs: data.duration,
|
durationMs: data.duration,
|
||||||
blockLogs: accumulatedBlockLogs,
|
blockLogs: accumulatedBlockLogs,
|
||||||
isPreExecutionError,
|
isPreExecutionError,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setIsExecuting(activeWorkflowId, false)
|
||||||
|
setIsDebugging(activeWorkflowId, false)
|
||||||
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCancelled: (data) => {
|
onExecutionCancelled: (data) => {
|
||||||
|
if (
|
||||||
|
activeWorkflowId &&
|
||||||
|
executionIdRef.current &&
|
||||||
|
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
|
||||||
|
executionIdRef.current
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setCurrentExecutionId(activeWorkflowId, null)
|
||||||
|
}
|
||||||
|
|
||||||
handleExecutionCancelledConsole({
|
handleExecutionCancelledConsole({
|
||||||
workflowId: activeWorkflowId,
|
workflowId: activeWorkflowId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
durationMs: data?.duration,
|
durationMs: data?.duration,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (activeWorkflowId) {
|
||||||
|
setIsExecuting(activeWorkflowId, false)
|
||||||
|
setIsDebugging(activeWorkflowId, false)
|
||||||
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
|
}
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
return executionResult
|
return executionResult
|
||||||
} catch (error: any) {
|
} catch (error: any) {
|
||||||
// Don't log abort errors - they're intentional user actions
|
|
||||||
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
|
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
|
||||||
logger.info('Execution aborted by user')
|
logger.info('Execution aborted by user')
|
||||||
|
return executionResult
|
||||||
// Reset execution state
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return gracefully without error
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
output: {},
|
|
||||||
metadata: { duration: 0 },
|
|
||||||
logs: [],
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.error('Server-side execution failed:', error)
|
logger.error('Server-side execution failed:', error)
|
||||||
@@ -1485,7 +1531,6 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fallback: should never reach here
|
|
||||||
throw new Error('Server-side execution is required')
|
throw new Error('Server-side execution is required')
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
|
|||||||
* Handles cancelling the current workflow execution
|
* Handles cancelling the current workflow execution
|
||||||
*/
|
*/
|
||||||
const handleCancelExecution = useCallback(() => {
|
const handleCancelExecution = useCallback(() => {
|
||||||
|
if (!activeWorkflowId) return
|
||||||
logger.info('Workflow execution cancellation requested')
|
logger.info('Workflow execution cancellation requested')
|
||||||
|
|
||||||
// Cancel the execution stream for this workflow (server-side)
|
const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
|
||||||
executionStream.cancel(activeWorkflowId ?? undefined)
|
|
||||||
|
|
||||||
// Mark current chat execution as superseded so its cleanup won't affect new executions
|
if (storedExecutionId) {
|
||||||
|
setCurrentExecutionId(activeWorkflowId, null)
|
||||||
|
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
|
||||||
|
method: 'POST',
|
||||||
|
}).catch(() => {})
|
||||||
|
handleExecutionCancelledConsole({
|
||||||
|
workflowId: activeWorkflowId,
|
||||||
|
executionId: storedExecutionId,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
executionStream.cancel(activeWorkflowId)
|
||||||
currentChatExecutionIdRef.current = null
|
currentChatExecutionIdRef.current = null
|
||||||
|
|
||||||
// Mark all running entries as canceled in the terminal
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
cancelRunningEntries(activeWorkflowId)
|
|
||||||
|
|
||||||
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
|
|
||||||
setIsExecuting(activeWorkflowId, false)
|
setIsExecuting(activeWorkflowId, false)
|
||||||
setIsDebugging(activeWorkflowId, false)
|
setIsDebugging(activeWorkflowId, false)
|
||||||
setActiveBlocks(activeWorkflowId, new Set())
|
setActiveBlocks(activeWorkflowId, new Set())
|
||||||
}
|
|
||||||
|
|
||||||
// If in debug mode, also reset debug state
|
|
||||||
if (isDebugging) {
|
if (isDebugging) {
|
||||||
resetDebugState()
|
resetDebugState()
|
||||||
}
|
}
|
||||||
@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
|
|||||||
setIsDebugging,
|
setIsDebugging,
|
||||||
setActiveBlocks,
|
setActiveBlocks,
|
||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
cancelRunningEntries,
|
getCurrentExecutionId,
|
||||||
|
setCurrentExecutionId,
|
||||||
|
handleExecutionCancelledConsole,
|
||||||
])
|
])
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
setIsExecuting(workflowId, true)
|
setIsExecuting(workflowId, true)
|
||||||
const executionId = uuidv4()
|
const executionIdRef = { current: '' }
|
||||||
const accumulatedBlockLogs: BlockLog[] = []
|
const accumulatedBlockLogs: BlockLog[] = []
|
||||||
const accumulatedBlockStates = new Map<string, BlockState>()
|
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||||
const executedBlockIds = new Set<string>()
|
const executedBlockIds = new Set<string>()
|
||||||
@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
|
|||||||
try {
|
try {
|
||||||
const blockHandlers = buildBlockEventHandlers({
|
const blockHandlers = buildBlockEventHandlers({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId,
|
executionIdRef,
|
||||||
workflowEdges,
|
workflowEdges,
|
||||||
activeBlocksSet,
|
activeBlocksSet,
|
||||||
accumulatedBlockLogs,
|
accumulatedBlockLogs,
|
||||||
@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
|
|||||||
startBlockId: blockId,
|
startBlockId: blockId,
|
||||||
sourceSnapshot: effectiveSnapshot,
|
sourceSnapshot: effectiveSnapshot,
|
||||||
input: workflowInput,
|
input: workflowInput,
|
||||||
|
onExecutionId: (id) => {
|
||||||
|
executionIdRef.current = id
|
||||||
|
setCurrentExecutionId(workflowId, id)
|
||||||
|
},
|
||||||
callbacks: {
|
callbacks: {
|
||||||
onBlockStarted: blockHandlers.onBlockStarted,
|
onBlockStarted: blockHandlers.onBlockStarted,
|
||||||
onBlockCompleted: blockHandlers.onBlockCompleted,
|
onBlockCompleted: blockHandlers.onBlockCompleted,
|
||||||
@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
onExecutionCompleted: (data) => {
|
onExecutionCompleted: (data) => {
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
// Add the start block (trigger) to executed blocks
|
|
||||||
executedBlockIds.add(blockId)
|
executedBlockIds.add(blockId)
|
||||||
|
|
||||||
const mergedBlockStates: Record<string, BlockState> = {
|
const mergedBlockStates: Record<string, BlockState> = {
|
||||||
@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
|
|||||||
}
|
}
|
||||||
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
setLastExecutionSnapshot(workflowId, updatedSnapshot)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
setCurrentExecutionId(workflowId, null)
|
||||||
|
setIsExecuting(workflowId, false)
|
||||||
|
setActiveBlocks(workflowId, new Set())
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionError: (data) => {
|
onExecutionError: (data) => {
|
||||||
@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
handleExecutionErrorConsole({
|
handleExecutionErrorConsole({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
error: data.error,
|
error: data.error,
|
||||||
durationMs: data.duration,
|
durationMs: data.duration,
|
||||||
blockLogs: accumulatedBlockLogs,
|
blockLogs: accumulatedBlockLogs,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
setCurrentExecutionId(workflowId, null)
|
||||||
|
setIsExecuting(workflowId, false)
|
||||||
|
setActiveBlocks(workflowId, new Set())
|
||||||
},
|
},
|
||||||
|
|
||||||
onExecutionCancelled: (data) => {
|
onExecutionCancelled: (data) => {
|
||||||
handleExecutionCancelledConsole({
|
handleExecutionCancelledConsole({
|
||||||
workflowId,
|
workflowId,
|
||||||
executionId,
|
executionId: executionIdRef.current,
|
||||||
durationMs: data?.duration,
|
durationMs: data?.duration,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
setCurrentExecutionId(workflowId, null)
|
||||||
|
setIsExecuting(workflowId, false)
|
||||||
|
setActiveBlocks(workflowId, new Set())
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
|
|||||||
logger.error('Run-from-block failed:', error)
|
logger.error('Run-from-block failed:', error)
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
|
const currentId = getCurrentExecutionId(workflowId)
|
||||||
|
if (currentId === null || currentId === executionIdRef.current) {
|
||||||
|
setCurrentExecutionId(workflowId, null)
|
||||||
setIsExecuting(workflowId, false)
|
setIsExecuting(workflowId, false)
|
||||||
setActiveBlocks(workflowId, new Set())
|
setActiveBlocks(workflowId, new Set())
|
||||||
}
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
[
|
[
|
||||||
getLastExecutionSnapshot,
|
getLastExecutionSnapshot,
|
||||||
setLastExecutionSnapshot,
|
setLastExecutionSnapshot,
|
||||||
clearLastExecutionSnapshot,
|
clearLastExecutionSnapshot,
|
||||||
|
getCurrentExecutionId,
|
||||||
|
setCurrentExecutionId,
|
||||||
setIsExecuting,
|
setIsExecuting,
|
||||||
setActiveBlocks,
|
setActiveBlocks,
|
||||||
setBlockRunStatus,
|
setBlockRunStatus,
|
||||||
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {
|
|||||||
|
|
||||||
const executionId = uuidv4()
|
const executionId = uuidv4()
|
||||||
try {
|
try {
|
||||||
const result = await executeWorkflow(
|
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
|
||||||
undefined,
|
|
||||||
undefined,
|
|
||||||
executionId,
|
|
||||||
undefined,
|
|
||||||
'manual',
|
|
||||||
blockId
|
|
||||||
)
|
|
||||||
if (result && 'success' in result) {
|
|
||||||
setExecutionResult(result)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const errorResult = handleExecutionError(error, { executionId })
|
const errorResult = handleExecutionError(error, { executionId })
|
||||||
return errorResult
|
return errorResult
|
||||||
} finally {
|
} finally {
|
||||||
|
setCurrentExecutionId(workflowId, null)
|
||||||
setIsExecuting(workflowId, false)
|
setIsExecuting(workflowId, false)
|
||||||
setIsDebugging(workflowId, false)
|
setIsDebugging(workflowId, false)
|
||||||
setActiveBlocks(workflowId, new Set())
|
setActiveBlocks(workflowId, new Set())
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
|
[
|
||||||
|
activeWorkflowId,
|
||||||
|
setCurrentExecutionId,
|
||||||
|
setExecutionResult,
|
||||||
|
setIsExecuting,
|
||||||
|
setIsDebugging,
|
||||||
|
setActiveBlocks,
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (!activeWorkflowId || !hasHydrated) return
|
||||||
|
|
||||||
|
const entries = useTerminalConsoleStore.getState().entries
|
||||||
|
const runningEntries = entries.filter(
|
||||||
|
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
|
||||||
|
)
|
||||||
|
if (runningEntries.length === 0) return
|
||||||
|
|
||||||
|
if (activeReconnections.has(activeWorkflowId)) return
|
||||||
|
activeReconnections.add(activeWorkflowId)
|
||||||
|
|
||||||
|
executionStream.cancel(activeWorkflowId)
|
||||||
|
|
||||||
|
const sorted = [...runningEntries].sort((a, b) => {
|
||||||
|
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
|
||||||
|
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
|
||||||
|
return bTime - aTime
|
||||||
|
})
|
||||||
|
const executionId = sorted[0].executionId!
|
||||||
|
|
||||||
|
const otherExecutionIds = new Set(
|
||||||
|
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
|
||||||
|
)
|
||||||
|
if (otherExecutionIds.size > 0) {
|
||||||
|
cancelRunningEntries(activeWorkflowId)
|
||||||
|
}
|
||||||
|
|
||||||
|
setCurrentExecutionId(activeWorkflowId, executionId)
|
||||||
|
setIsExecuting(activeWorkflowId, true)
|
||||||
|
|
||||||
|
const workflowEdges = useWorkflowStore.getState().edges
|
||||||
|
const activeBlocksSet = new Set<string>()
|
||||||
|
const accumulatedBlockLogs: BlockLog[] = []
|
||||||
|
const accumulatedBlockStates = new Map<string, BlockState>()
|
||||||
|
const executedBlockIds = new Set<string>()
|
||||||
|
|
||||||
|
const executionIdRef = { current: executionId }
|
||||||
|
|
||||||
|
const handlers = buildBlockEventHandlers({
|
||||||
|
workflowId: activeWorkflowId,
|
||||||
|
executionIdRef,
|
||||||
|
workflowEdges,
|
||||||
|
activeBlocksSet,
|
||||||
|
accumulatedBlockLogs,
|
||||||
|
accumulatedBlockStates,
|
||||||
|
executedBlockIds,
|
||||||
|
consoleMode: 'update',
|
||||||
|
includeStartConsoleEntry: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
const originalEntries = entries
|
||||||
|
.filter((e) => e.executionId === executionId)
|
||||||
|
.map((e) => ({ ...e }))
|
||||||
|
|
||||||
|
let cleared = false
|
||||||
|
let reconnectionComplete = false
|
||||||
|
let cleanupRan = false
|
||||||
|
const clearOnce = () => {
|
||||||
|
if (!cleared) {
|
||||||
|
cleared = true
|
||||||
|
clearExecutionEntries(executionId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const reconnectWorkflowId = activeWorkflowId
|
||||||
|
|
||||||
|
executionStream
|
||||||
|
.reconnect({
|
||||||
|
workflowId: reconnectWorkflowId,
|
||||||
|
executionId,
|
||||||
|
callbacks: {
|
||||||
|
onBlockStarted: (data) => {
|
||||||
|
clearOnce()
|
||||||
|
handlers.onBlockStarted(data)
|
||||||
|
},
|
||||||
|
onBlockCompleted: (data) => {
|
||||||
|
clearOnce()
|
||||||
|
handlers.onBlockCompleted(data)
|
||||||
|
},
|
||||||
|
onBlockError: (data) => {
|
||||||
|
clearOnce()
|
||||||
|
handlers.onBlockError(data)
|
||||||
|
},
|
||||||
|
onExecutionCompleted: () => {
|
||||||
|
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+          },
+          onExecutionError: (data) => {
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+            handleExecutionErrorConsole({
+              workflowId: reconnectWorkflowId,
+              executionId,
+              error: data.error,
+              blockLogs: accumulatedBlockLogs,
+            })
+          },
+          onExecutionCancelled: () => {
+            const currentId = useExecutionStore
+              .getState()
+              .getCurrentExecutionId(reconnectWorkflowId)
+            if (currentId !== executionId) {
+              reconnectionComplete = true
+              activeReconnections.delete(reconnectWorkflowId)
+              return
+            }
+            clearOnce()
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            setCurrentExecutionId(reconnectWorkflowId, null)
+            setIsExecuting(reconnectWorkflowId, false)
+            setActiveBlocks(reconnectWorkflowId, new Set())
+            handleExecutionCancelledConsole({
+              workflowId: reconnectWorkflowId,
+              executionId,
+            })
+          },
+        },
+      })
+      .catch((error) => {
+        logger.warn('Execution reconnection failed', { executionId, error })
+      })
+      .finally(() => {
+        if (reconnectionComplete || cleanupRan) return
+        const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
+        if (currentId !== executionId) return
+        reconnectionComplete = true
+        activeReconnections.delete(reconnectWorkflowId)
+        clearExecutionEntries(executionId)
+        for (const entry of originalEntries) {
+          addConsole({
+            workflowId: entry.workflowId,
+            blockId: entry.blockId,
+            blockName: entry.blockName,
+            blockType: entry.blockType,
+            executionId: entry.executionId,
+            executionOrder: entry.executionOrder,
+            isRunning: false,
+            warning: 'Execution result unavailable — check the logs page',
+          })
+        }
+        setCurrentExecutionId(reconnectWorkflowId, null)
+        setIsExecuting(reconnectWorkflowId, false)
+        setActiveBlocks(reconnectWorkflowId, new Set())
+      })
+
+    return () => {
+      cleanupRan = true
+      executionStream.cancel(reconnectWorkflowId)
+      activeReconnections.delete(reconnectWorkflowId)
+
+      if (cleared && !reconnectionComplete) {
+        clearExecutionEntries(executionId)
+        for (const entry of originalEntries) {
+          addConsole(entry)
+        }
+      }
+    }
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [activeWorkflowId, hasHydrated])
+
   return {
     isExecuting,
     isDebugging,
@@ -196,6 +196,8 @@ export interface SubBlockConfig {
   type: SubBlockType
   mode?: 'basic' | 'advanced' | 'both' | 'trigger' // Default is 'both' if not specified. 'trigger' means only shown in trigger mode
   canonicalParamId?: string
+  /** Controls parameter visibility in agent/tool-input context */
+  paramVisibility?: 'user-or-llm' | 'user-only' | 'llm-only' | 'hidden'
   required?:
     | boolean
     | {
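For reference, a minimal sketch of how a block definition might use the new `paramVisibility` field. Only `paramVisibility`, `mode`, and `required` come from this diff; the other field names and the `'short-input'` type value are assumptions for illustration:

// Hypothetical sub-block: the key must come from the user and should never be
// exposed to the LLM as a fillable tool parameter.
const apiKeySubBlock: SubBlockConfig = {
  id: 'apiKey', // assumed field name
  title: 'API Key', // assumed
  type: 'short-input', // assumed SubBlockType value
  mode: 'both',
  required: true,
  paramVisibility: 'user-only', // new field introduced in this change
}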
@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

 const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

-Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
+Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.

 Guidelines:
 - Use the specific values provided (credential names, channel names, model names)
@@ -1,4 +1,4 @@
-import { useCallback, useRef } from 'react'
+import { useCallback } from 'react'
 import { createLogger } from '@sim/logger'
 import type {
   BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

 const logger = createLogger('useExecutionStream')

+/**
+ * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
+ * These should be treated as clean disconnects, not execution errors.
+ */
+function isClientDisconnectError(error: any): boolean {
+  if (error.name === 'AbortError') return true
+  const msg = (error.message ?? '').toLowerCase()
+  return (
+    msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
+  )
+}
+
 /**
  * Processes SSE events from a response body and invokes appropriate callbacks.
  */
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
     parallels?: Record<string, any>
   }
   stopAfterBlockId?: string
+  onExecutionId?: (executionId: string) => void
   callbacks?: ExecutionStreamCallbacks
 }

@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
   startBlockId: string
   sourceSnapshot: SerializableExecutionState
   input?: any
+  onExecutionId?: (executionId: string) => void
   callbacks?: ExecutionStreamCallbacks
 }

+export interface ReconnectStreamOptions {
+  workflowId: string
+  executionId: string
+  fromEventId?: number
+  callbacks?: ExecutionStreamCallbacks
+}
+
+/**
+ * Module-level map shared across all hook instances.
+ * Ensures ANY instance can cancel streams started by ANY other instance,
+ * which is critical for SPA navigation where the original hook instance unmounts
+ * but the SSE stream must be cancellable from the new instance.
+ */
+const sharedAbortControllers = new Map<string, AbortController>()
+
 /**
  * Hook for executing workflows via server-side SSE streaming.
  * Supports concurrent executions via per-workflow AbortController maps.
  */
 export function useExecutionStream() {
-  const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
-  const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
-    new Map()
-  )
-
   const execute = useCallback(async (options: ExecuteStreamOptions) => {
-    const { workflowId, callbacks = {}, ...payload } = options
+    const { workflowId, callbacks = {}, onExecutionId, ...payload } = options

-    const existing = abortControllersRef.current.get(workflowId)
+    const existing = sharedAbortControllers.get(workflowId)
     if (existing) {
       existing.abort()
     }

     const abortController = new AbortController()
-    abortControllersRef.current.set(workflowId, abortController)
-    currentExecutionsRef.current.delete(workflowId)
+    sharedAbortControllers.set(workflowId, abortController)

     try {
       const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
         throw new Error('No response body')
       }

-      const executionId = response.headers.get('X-Execution-Id')
-      if (executionId) {
-        currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
+      const serverExecutionId = response.headers.get('X-Execution-Id')
+      if (serverExecutionId) {
+        onExecutionId?.(serverExecutionId)
       }

       const reader = response.body.getReader()
       await processSSEStream(reader, callbacks, 'Execution')
     } catch (error: any) {
-      if (error.name === 'AbortError') {
-        logger.info('Execution stream cancelled')
-        callbacks.onExecutionCancelled?.({ duration: 0 })
-      } else {
+      if (isClientDisconnectError(error)) {
+        logger.info('Execution stream disconnected (page unload or abort)')
+        return
+      }
       logger.error('Execution stream error:', error)
       callbacks.onExecutionError?.({
         error: error.message || 'Unknown error',
         duration: 0,
       })
-      }
       throw error
     } finally {
-      abortControllersRef.current.delete(workflowId)
-      currentExecutionsRef.current.delete(workflowId)
+      if (sharedAbortControllers.get(workflowId) === abortController) {
+        sharedAbortControllers.delete(workflowId)
+      }
     }
   }, [])

   const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
-    const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
+    const {
+      workflowId,
+      startBlockId,
+      sourceSnapshot,
+      input,
+      onExecutionId,
+      callbacks = {},
+    } = options

-    const existing = abortControllersRef.current.get(workflowId)
+    const existing = sharedAbortControllers.get(workflowId)
     if (existing) {
       existing.abort()
     }

     const abortController = new AbortController()
-    abortControllersRef.current.set(workflowId, abortController)
-    currentExecutionsRef.current.delete(workflowId)
+    sharedAbortControllers.set(workflowId, abortController)

     try {
       const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
         throw new Error('No response body')
       }

-      const executionId = response.headers.get('X-Execution-Id')
-      if (executionId) {
-        currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
+      const serverExecutionId = response.headers.get('X-Execution-Id')
+      if (serverExecutionId) {
+        onExecutionId?.(serverExecutionId)
       }

       const reader = response.body.getReader()
       await processSSEStream(reader, callbacks, 'Run-from-block')
     } catch (error: any) {
-      if (error.name === 'AbortError') {
-        logger.info('Run-from-block execution cancelled')
-        callbacks.onExecutionCancelled?.({ duration: 0 })
-      } else {
+      if (isClientDisconnectError(error)) {
+        logger.info('Run-from-block stream disconnected (page unload or abort)')
+        return
+      }
       logger.error('Run-from-block execution error:', error)
       callbacks.onExecutionError?.({
         error: error.message || 'Unknown error',
         duration: 0,
       })
-      }
       throw error
     } finally {
-      abortControllersRef.current.delete(workflowId)
-      currentExecutionsRef.current.delete(workflowId)
+      if (sharedAbortControllers.get(workflowId) === abortController) {
+        sharedAbortControllers.delete(workflowId)
+      }
+    }
+  }, [])

+  const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
+    const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
+
+    const existing = sharedAbortControllers.get(workflowId)
+    if (existing) {
+      existing.abort()
+    }
+
+    const abortController = new AbortController()
+    sharedAbortControllers.set(workflowId, abortController)
+    try {
+      const response = await fetch(
+        `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
+        { signal: abortController.signal }
+      )
+      if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
+      if (!response.body) throw new Error('No response body')
+
+      await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
+    } catch (error: any) {
+      if (isClientDisconnectError(error)) return
+      logger.error('Reconnection stream error:', error)
+      throw error
+    } finally {
+      if (sharedAbortControllers.get(workflowId) === abortController) {
+        sharedAbortControllers.delete(workflowId)
+      }
     }
   }, [])

   const cancel = useCallback((workflowId?: string) => {
     if (workflowId) {
-      const execution = currentExecutionsRef.current.get(workflowId)
-      if (execution) {
-        fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
-          method: 'POST',
-        }).catch(() => {})
-      }
-
-      const controller = abortControllersRef.current.get(workflowId)
+      const controller = sharedAbortControllers.get(workflowId)
       if (controller) {
         controller.abort()
-        abortControllersRef.current.delete(workflowId)
+        sharedAbortControllers.delete(workflowId)
       }
-      currentExecutionsRef.current.delete(workflowId)
     } else {
-      for (const [, execution] of currentExecutionsRef.current) {
-        fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
-          method: 'POST',
-        }).catch(() => {})
-      }
-
-      for (const [, controller] of abortControllersRef.current) {
+      for (const [, controller] of sharedAbortControllers) {
         controller.abort()
       }
-      abortControllersRef.current.clear()
-      currentExecutionsRef.current.clear()
+      sharedAbortControllers.clear()
     }
   }, [])

   return {
     execute,
     executeFromBlock,
+    reconnect,
     cancel,
   }
 }
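Putting the reworked hook together: the execution id now surfaces through `onExecutionId` and a dedicated `reconnect` call resumes a server-side SSE stream. A minimal usage sketch, assuming the surrounding component and store wiring (`setCurrentExecutionId`, `savedExecutionId`, and the `callbacks` object are placeholders, not part of the diff):

const { execute, reconnect, cancel } = useExecutionStream()

// Kick off an execution and remember its id so a later mount can reattach.
await execute({
  workflowId,
  onExecutionId: (executionId) => setCurrentExecutionId(workflowId, executionId),
  callbacks: {
    onExecutionError: ({ error }) => console.error(error),
  },
})

// After a refresh or navigation, resume streaming events for a still-running execution.
await reconnect({
  workflowId,
  executionId: savedExecutionId,
  fromEventId: 0,
  callbacks,
})

// Abort whatever stream is attached to this workflow, regardless of which hook
// instance started it (the shared module-level controller map makes this possible).
cancel(workflowId)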
@@ -1,5 +1,5 @@
 import { createLogger } from '@sim/logger'
-import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
+import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
 import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
 import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
 import {
@@ -26,119 +26,21 @@ const MAX_BATCH_INTERVAL = 50
 const MIN_BATCH_INTERVAL = 16
 const MAX_QUEUE_SIZE = 5

-function isWorkflowEditToolCall(toolName?: string, params?: Record<string, unknown>): boolean {
-  if (toolName === 'edit_workflow') return true
-  if (toolName !== 'workflow_change') return false
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
-}
-
-function mapServerStateToClientState(state: unknown): ClientToolCallState {
-  switch (String(state || '')) {
-    case 'generating':
-      return ClientToolCallState.generating
-    case 'pending':
-    case 'awaiting_approval':
-      return ClientToolCallState.pending
-    case 'executing':
-      return ClientToolCallState.executing
-    case 'success':
-      return ClientToolCallState.success
-    case 'rejected':
-    case 'skipped':
-      return ClientToolCallState.rejected
-    case 'aborted':
-      return ClientToolCallState.aborted
-    case 'error':
-    case 'failed':
-      return ClientToolCallState.error
-    default:
-      return ClientToolCallState.pending
-  }
-}
-
-function extractToolUiMetadata(data: Record<string, unknown>): CopilotToolCall['ui'] | undefined {
-  const ui = asRecord(data.ui)
-  if (!ui || Object.keys(ui).length === 0) return undefined
-  const autoAllowedFromUi = ui.autoAllowed === true
-  const autoAllowedFromData = data.autoAllowed === true
-  return {
-    title: typeof ui.title === 'string' ? ui.title : undefined,
-    phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
-    icon: typeof ui.icon === 'string' ? ui.icon : undefined,
-    showInterrupt: ui.showInterrupt === true,
-    showRemember: ui.showRemember === true,
-    autoAllowed: autoAllowedFromUi || autoAllowedFromData,
-    actions: Array.isArray(ui.actions)
-      ? ui.actions
-          .map((action) => {
-            const a = asRecord(action)
-            const id = typeof a.id === 'string' ? a.id : undefined
-            const label = typeof a.label === 'string' ? a.label : undefined
-            const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
-            if (!id || !label) return null
-            return {
-              id,
-              label,
-              kind,
-              remember: a.remember === true,
-            }
-          })
-          .filter((a): a is NonNullable<typeof a> => !!a)
-      : undefined,
-  }
-}
-
-function extractToolExecutionMetadata(
-  data: Record<string, unknown>
-): CopilotToolCall['execution'] | undefined {
-  const execution = asRecord(data.execution)
-  if (!execution || Object.keys(execution).length === 0) return undefined
-  return {
-    target: typeof execution.target === 'string' ? execution.target : undefined,
-    capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
-  }
-}
-
-function isWorkflowChangeApplyCall(toolName?: string, params?: Record<string, unknown>): boolean {
-  if (toolName !== 'workflow_change') return false
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
-function extractWorkflowStateFromResultPayload(
-  resultPayload: Record<string, unknown>
-): WorkflowState | null {
-  const directState = asRecord(resultPayload.workflowState)
-  if (directState) return directState as unknown as WorkflowState
-
-  const editResult = asRecord(resultPayload.editResult)
-  const nestedState = asRecord(editResult?.workflowState)
-  if (nestedState) return nestedState as unknown as WorkflowState
-
-  return null
-}
-
-function extractOperationListFromResultPayload(
-  resultPayload: Record<string, unknown>
-): Array<Record<string, unknown>> | undefined {
-  const operations = resultPayload.operations
-  if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
-
-  const compiled = resultPayload.compiledOperations
-  if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
-
-  return undefined
-}
+/**
+ * Send an auto-accept confirmation to the server for auto-allowed tools.
+ * The server-side orchestrator polls Redis for this decision.
+ */
+export function sendAutoAcceptConfirmation(toolCallId: string): void {
+  fetch(COPILOT_CONFIRM_API_PATH, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ toolCallId, status: 'accepted' }),
+  }).catch((error) => {
+    logger.warn('Failed to send auto-accept confirmation', {
+      toolCallId,
+      error: error instanceof Error ? error.message : String(error),
+    })
+  })
+}

 function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
@@ -342,28 +244,14 @@ export const sseHandlers: Record<string, SSEHandler> = {
     try {
       const eventData = asRecord(data?.data)
       const toolCallId: string | undefined =
-        data?.toolCallId ||
-        (eventData.id as string | undefined) ||
-        (eventData.callId as string | undefined)
+        data?.toolCallId || (eventData.id as string | undefined)
       const success: boolean | undefined = data?.success
       const failedDependency: boolean = data?.failedDependency === true
       const resultObj = asRecord(data?.result)
       const skipped: boolean = resultObj.skipped === true
       if (!toolCallId) return
-      const uiMetadata = extractToolUiMetadata(eventData)
-      const executionMetadata = extractToolExecutionMetadata(eventData)
-      const serverState = (eventData.state as string | undefined) || undefined
-      const targetState = serverState
-        ? mapServerStateToClientState(serverState)
-        : success
-          ? ClientToolCallState.success
-          : failedDependency || skipped
-            ? ClientToolCallState.rejected
-            : ClientToolCallState.error
-      const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
       const { toolCallsById } = get()
       const current = toolCallsById[toolCallId]
-      let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
      if (current) {
        if (
          isRejectedState(current.state) ||
@@ -372,32 +260,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
        ) {
          return
        }
-        if (
-          targetState === ClientToolCallState.success &&
-          isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
-        ) {
-          const operations = extractOperationListFromResultPayload(resultPayload || {})
-          if (operations && operations.length > 0) {
-            paramsForCurrentToolCall = {
-              ...(current.params || {}),
-              operations,
-            }
-          }
-        }
-
+        const targetState = success
+          ? ClientToolCallState.success
+          : failedDependency || skipped
+            ? ClientToolCallState.rejected
+            : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
-          ui: uiMetadata || current.ui,
-          execution: executionMetadata || current.execution,
-          params: paramsForCurrentToolCall,
          state: targetState,
-          display: resolveToolDisplay(
-            current.name,
-            targetState,
-            current.id,
-            paramsForCurrentToolCall
-          ),
+          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })

@@ -440,39 +312,31 @@ export const sseHandlers: Record<string, SSEHandler> = {
          }
        }

-        if (
-          targetState === ClientToolCallState.success &&
-          isWorkflowEditToolCall(current.name, paramsForCurrentToolCall)
-        ) {
+        if (current.name === 'edit_workflow') {
          try {
-            const workflowState = resultPayload
-              ? extractWorkflowStateFromResultPayload(resultPayload)
-              : null
-            const hasWorkflowState = !!workflowState
-            logger.info('[SSE] workflow edit result received', {
-              toolName: current.name,
-              hasWorkflowState,
-              blockCount: hasWorkflowState
-                ? Object.keys((workflowState as any).blocks ?? {}).length
-                : 0,
-              edgeCount:
-                hasWorkflowState && Array.isArray((workflowState as any).edges)
-                  ? (workflowState as any).edges.length
-                  : 0,
-            })
-            if (workflowState) {
+            const resultPayload = asRecord(
+              data?.result || eventData.result || eventData.data || data?.data
+            )
+            const workflowState = asRecord(resultPayload?.workflowState)
+            const hasWorkflowState = !!resultPayload?.workflowState
+            logger.info('[SSE] edit_workflow result received', {
+              hasWorkflowState,
+              blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
+              edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
+            })
+            if (hasWorkflowState) {
              const diffStore = useWorkflowDiffStore.getState()
-              diffStore.setProposedChanges(workflowState).catch((err) => {
-                logger.error('[SSE] Failed to apply workflow edit diff', {
-                  error: err instanceof Error ? err.message : String(err),
-                  toolName: current.name,
-                })
-              })
+              diffStore
+                .setProposedChanges(resultPayload.workflowState as WorkflowState)
+                .catch((err) => {
+                  logger.error('[SSE] Failed to apply edit_workflow diff', {
+                    error: err instanceof Error ? err.message : String(err),
+                  })
+                })
            }
          } catch (err) {
-            logger.error('[SSE] workflow edit result handling failed', {
+            logger.error('[SSE] edit_workflow result handling failed', {
              error: err instanceof Error ? err.message : String(err),
-              toolName: current.name,
            })
          }
        }
@@ -596,23 +460,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
            : failedDependency || skipped
              ? ClientToolCallState.rejected
              : ClientToolCallState.error
-          const paramsForBlock =
-            b.toolCall?.id === toolCallId
-              ? paramsForCurrentToolCall || b.toolCall?.params
-              : b.toolCall?.params
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
-              params: paramsForBlock,
-              ui: uiMetadata || b.toolCall?.ui,
-              execution: executionMetadata || b.toolCall?.execution,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
                targetState,
                toolCallId,
-                paramsForBlock
+                b.toolCall?.params
              ),
            },
          }
@@ -630,9 +487,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
    try {
      const errorData = asRecord(data?.data)
      const toolCallId: string | undefined =
-        data?.toolCallId ||
-        (errorData.id as string | undefined) ||
-        (errorData.callId as string | undefined)
+        data?.toolCallId || (errorData.id as string | undefined)
      const failedDependency: boolean = data?.failedDependency === true
      if (!toolCallId) return
      const { toolCallsById } = get()
@@ -645,18 +500,12 @@ export const sseHandlers: Record<string, SSEHandler> = {
      ) {
        return
      }
-      const targetState = errorData.state
-        ? mapServerStateToClientState(errorData.state)
-        : failedDependency
+      const targetState = failedDependency
          ? ClientToolCallState.rejected
          : ClientToolCallState.error
-      const uiMetadata = extractToolUiMetadata(errorData)
-      const executionMetadata = extractToolExecutionMetadata(errorData)
      const updatedMap = { ...toolCallsById }
      updatedMap[toolCallId] = {
        ...current,
-        ui: uiMetadata || current.ui,
-        execution: executionMetadata || current.execution,
        state: targetState,
        display: resolveToolDisplay(current.name, targetState, current.id, current.params),
      }
@@ -671,19 +520,13 @@ export const sseHandlers: Record<string, SSEHandler> = {
            isBackgroundState(b.toolCall?.state)
          )
            break
-          const targetState = errorData.state
-            ? mapServerStateToClientState(errorData.state)
-            : failedDependency
+          const targetState = failedDependency
              ? ClientToolCallState.rejected
              : ClientToolCallState.error
-          const uiMetadata = extractToolUiMetadata(errorData)
-          const executionMetadata = extractToolExecutionMetadata(errorData)
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
-              ui: uiMetadata || b.toolCall?.ui,
-              execution: executionMetadata || b.toolCall?.execution,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
@@ -704,26 +547,19 @@ export const sseHandlers: Record<string, SSEHandler> = {
    }
  },
  tool_generating: (data, context, get, set) => {
-    const eventData = asRecord(data?.data)
-    const toolCallId =
-      data?.toolCallId ||
-      (eventData.id as string | undefined) ||
-      (eventData.callId as string | undefined)
-    const toolName =
-      data?.toolName ||
-      (eventData.name as string | undefined) ||
-      (eventData.toolName as string | undefined)
+    const { toolCallId, toolName } = data
    if (!toolCallId || !toolName) return
    const { toolCallsById } = get()

    if (!toolCallsById[toolCallId]) {
-      const initialState = ClientToolCallState.generating
+      const isAutoAllowed = get().isToolAutoAllowed(toolName)
+      const initialState = isAutoAllowed
+        ? ClientToolCallState.executing
+        : ClientToolCallState.pending
      const tc: CopilotToolCall = {
        id: toolCallId,
        name: toolName,
        state: initialState,
-        ui: extractToolUiMetadata(eventData),
-        execution: extractToolExecutionMetadata(eventData),
        display: resolveToolDisplay(toolName, initialState, toolCallId),
      }
      const updated = { ...toolCallsById, [toolCallId]: tc }
@@ -736,27 +572,17 @@ export const sseHandlers: Record<string, SSEHandler> = {
  },
  tool_call: (data, context, get, set) => {
    const toolData = asRecord(data?.data)
-    const id: string | undefined =
-      (toolData.id as string | undefined) ||
-      (toolData.callId as string | undefined) ||
-      data?.toolCallId
-    const name: string | undefined =
-      (toolData.name as string | undefined) ||
-      (toolData.toolName as string | undefined) ||
-      data?.toolName
+    const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
+    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
    if (!id) return
    const args = toolData.arguments as Record<string, unknown> | undefined
    const isPartial = toolData.partial === true
-    const uiMetadata = extractToolUiMetadata(toolData)
-    const executionMetadata = extractToolExecutionMetadata(toolData)
-    const serverState = toolData.state
    const { toolCallsById } = get()

    const existing = toolCallsById[id]
    const toolName = name || existing?.name || 'unknown_tool'
-    let initialState = serverState
-      ? mapServerStateToClientState(serverState)
-      : ClientToolCallState.pending
+    const isAutoAllowed = get().isToolAutoAllowed(toolName)
+    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending

    // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
    if (
@@ -771,8 +597,6 @@ export const sseHandlers: Record<string, SSEHandler> = {
        ...existing,
        name: toolName,
        state: initialState,
-        ui: uiMetadata || existing.ui,
-        execution: executionMetadata || existing.execution,
        ...(args ? { params: args } : {}),
        display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
      }
@@ -780,8 +604,6 @@ export const sseHandlers: Record<string, SSEHandler> = {
        id,
        name: toolName,
        state: initialState,
-        ui: uiMetadata,
-        execution: executionMetadata,
        ...(args ? { params: args } : {}),
        display: resolveToolDisplay(toolName, initialState, id, args),
      }
@@ -796,12 +618,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
      return
    }

-    const shouldInterrupt = next.ui?.showInterrupt === true
+    // Auto-allowed tools: send confirmation to the server so it can proceed
+    // without waiting for the user to click "Allow".
+    if (isAutoAllowed) {
+      sendAutoAcceptConfirmation(id)
+    }
+
-    // Client-run capability: execution is delegated to the browser.
-    // We run immediately only when no interrupt is required.
-    if (isClientRunCapability(next) && !shouldInterrupt) {
-      executeRunToolOnClient(id, toolName, args || next.params || {})
+    // Client-executable run tools: execute on the client for real-time feedback
+    // (block pulsing, console logs, stop button). The server defers execution
+    // for these tools in interactive mode; the client reports back via mark-complete.
+    if (
+      CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
+      initialState === ClientToolCallState.executing
+    ) {
+      executeRunToolOnClient(id, toolName, args || existing?.params || {})
    }

    // OAuth: dispatch event to open the OAuth connect modal
@@ -9,10 +9,9 @@ import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
 import { resolveToolDisplay } from '@/lib/copilot/store-utils'
 import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
 import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
-import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
-import type { WorkflowState } from '@/stores/workflows/workflow/types'
 import {
   type SSEHandler,
+  sendAutoAcceptConfirmation,
   sseHandlers,
   updateStreamingMessage,
 } from './handlers'
@@ -25,113 +24,6 @@ type StoreSet = (
   partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
 ) => void

-function mapServerStateToClientState(state: unknown): ClientToolCallState {
-  switch (String(state || '')) {
-    case 'generating':
-      return ClientToolCallState.generating
-    case 'pending':
-    case 'awaiting_approval':
-      return ClientToolCallState.pending
-    case 'executing':
-      return ClientToolCallState.executing
-    case 'success':
-      return ClientToolCallState.success
-    case 'rejected':
-    case 'skipped':
-      return ClientToolCallState.rejected
-    case 'aborted':
-      return ClientToolCallState.aborted
-    case 'error':
-    case 'failed':
-      return ClientToolCallState.error
-    default:
-      return ClientToolCallState.pending
-  }
-}
-
-function extractToolUiMetadata(data: Record<string, unknown>): CopilotToolCall['ui'] | undefined {
-  const ui = asRecord(data.ui)
-  if (!ui || Object.keys(ui).length === 0) return undefined
-  const autoAllowedFromUi = ui.autoAllowed === true
-  const autoAllowedFromData = data.autoAllowed === true
-  return {
-    title: typeof ui.title === 'string' ? ui.title : undefined,
-    phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
-    icon: typeof ui.icon === 'string' ? ui.icon : undefined,
-    showInterrupt: ui.showInterrupt === true,
-    showRemember: ui.showRemember === true,
-    autoAllowed: autoAllowedFromUi || autoAllowedFromData,
-    actions: Array.isArray(ui.actions)
-      ? ui.actions
-          .map((action) => {
-            const a = asRecord(action)
-            const id = typeof a.id === 'string' ? a.id : undefined
-            const label = typeof a.label === 'string' ? a.label : undefined
-            const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
-            if (!id || !label) return null
-            return {
-              id,
-              label,
-              kind,
-              remember: a.remember === true,
-            }
-          })
-          .filter((a): a is NonNullable<typeof a> => !!a)
-      : undefined,
-  }
-}
-
-function extractToolExecutionMetadata(
-  data: Record<string, unknown>
-): CopilotToolCall['execution'] | undefined {
-  const execution = asRecord(data.execution)
-  if (!execution || Object.keys(execution).length === 0) return undefined
-  return {
-    target: typeof execution.target === 'string' ? execution.target : undefined,
-    capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
-  }
-}
-
-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
-}
-
-function isWorkflowChangeApplyCall(toolCall: CopilotToolCall): boolean {
-  if (toolCall.name !== 'workflow_change') return false
-  const params = (toolCall.params || {}) as Record<string, unknown>
-  const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params.proposalId === 'string' && params.proposalId.length > 0
-}
-
-function extractWorkflowStateFromResultPayload(
-  resultPayload: Record<string, unknown>
-): WorkflowState | null {
-  const directState = asRecord(resultPayload.workflowState)
-  if (directState) return directState as unknown as WorkflowState
-
-  const editResult = asRecord(resultPayload.editResult)
-  const nestedState = asRecord(editResult?.workflowState)
-  if (nestedState) return nestedState as unknown as WorkflowState
-
-  return null
-}
-
-function extractOperationListFromResultPayload(
-  resultPayload: Record<string, unknown>
-): Array<Record<string, unknown>> | undefined {
-  const operations = resultPayload.operations
-  if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
-
-  const compiled = resultPayload.compiledOperations
-  if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
-
-  return undefined
-}
-
 export function appendSubAgentContent(
   context: ClientStreamingContext,
   parentToolCallId: string,
@@ -272,8 +164,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
    if (!id || !name) return
    const isPartial = toolData.partial === true
-    const uiMetadata = extractToolUiMetadata(toolData)
-    const executionMetadata = extractToolExecutionMetadata(toolData)

    let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
      | Record<string, unknown>
@@ -309,10 +199,9 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    const existingToolCall =
      existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined

-    const serverState = toolData.state
-    let initialState = serverState
-      ? mapServerStateToClientState(serverState)
-      : ClientToolCallState.pending
+    // Auto-allowed tools skip pending state to avoid flashing interrupt buttons
+    const isAutoAllowed = get().isToolAutoAllowed(name)
+    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending

    // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
    if (
@@ -326,8 +215,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
        id,
        name,
        state: initialState,
-        ui: uiMetadata,
-        execution: executionMetadata,
        ...(args ? { params: args } : {}),
        display: resolveToolDisplay(name, initialState, id, args),
      }
@@ -354,11 +241,16 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
      return
    }

-    const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
+    // Auto-allowed tools: send confirmation to the server so it can proceed
+    // without waiting for the user to click "Allow".
+    if (isAutoAllowed) {
+      sendAutoAcceptConfirmation(id)
+    }
+
-    // Client-run capability: execution is delegated to the browser.
-    // Execute immediately only for non-interrupting calls.
-    if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
+    // Client-executable run tools: if auto-allowed, execute immediately for
+    // real-time feedback. For non-auto-allowed, the user must click "Allow"
+    // first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
+    if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
      executeRunToolOnClient(id, name, args || {})
    }
  },
@@ -383,45 +275,17 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
    if (!context.subAgentToolCalls[parentToolCallId]) return
    if (!context.subAgentBlocks[parentToolCallId]) return

-    const serverState = resultData.state
-    const targetState = serverState
-      ? mapServerStateToClientState(serverState)
-      : success
-        ? ClientToolCallState.success
-        : ClientToolCallState.error
-    const uiMetadata = extractToolUiMetadata(resultData)
-    const executionMetadata = extractToolExecutionMetadata(resultData)
+    const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
    const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
      (tc: CopilotToolCall) => tc.id === toolCallId
    )

    if (existingIndex >= 0) {
      const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
-      let nextParams = existing.params
-      const resultPayload = asRecord(
-        data?.result || resultData.result || resultData.data || data?.data
-      )
-      if (
-        targetState === ClientToolCallState.success &&
-        isWorkflowChangeApplyCall(existing) &&
-        resultPayload
-      ) {
-        const operations = extractOperationListFromResultPayload(resultPayload)
-        if (operations && operations.length > 0) {
-          nextParams = {
-            ...(existing.params || {}),
-            operations,
-          }
-        }
-      }
-
      const updatedSubAgentToolCall = {
        ...existing,
-        params: nextParams,
-        ui: uiMetadata || existing.ui,
-        execution: executionMetadata || existing.execution,
        state: targetState,
-        display: resolveToolDisplay(existing.name, targetState, toolCallId, nextParams),
+        display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
      }
      context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall

@@ -445,23 +309,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
          state: targetState,
        })
      }
-
-      if (
-        targetState === ClientToolCallState.success &&
-        resultPayload &&
-        isWorkflowChangeApplyCall(updatedSubAgentToolCall)
-      ) {
-        const workflowState = extractWorkflowStateFromResultPayload(resultPayload)
-        if (workflowState) {
-          const diffStore = useWorkflowDiffStore.getState()
-          diffStore.setProposedChanges(workflowState).catch((error) => {
-            logger.error('[SubAgent] Failed to apply workflow_change diff', {
-              error: error instanceof Error ? error.message : String(error),
-              toolCallId,
-            })
-          })
-        }
-      }
    }

    updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
@@ -101,6 +101,9 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
 /** POST — revert to a checkpoint. */
 export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'

+/** GET/POST/DELETE — manage auto-allowed tools. */
+export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
+
 /** GET — fetch dynamically available copilot models. */
 export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
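The new constant gives the client a single path for the auto-allowed tools endpoints. A minimal sketch of a fetch helper built on it; the response shape `{ autoAllowedTools: string[] }` is assumed from the route added in this change:

import { COPILOT_AUTO_ALLOWED_TOOLS_API_PATH } from '@/lib/copilot/constants'

// Returns the current user's auto-allowed integration tool ids, or [] on failure.
async function fetchAutoAllowedTools(): Promise<string[]> {
  const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
  if (!res.ok) return []
  const data = await res.json().catch(() => ({}))
  return Array.isArray(data?.autoAllowedTools) ? data.autoAllowedTools : []
}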
apps/sim/lib/copilot/orchestrator/config.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
+export const INTERRUPT_TOOL_NAMES = [
+  'set_global_workflow_variables',
+  'run_workflow',
+  'run_workflow_until_block',
+  'run_from_block',
+  'run_block',
+  'manage_mcp_tool',
+  'manage_custom_tool',
+  'deploy_mcp',
+  'deploy_chat',
+  'deploy_api',
+  'create_workspace_mcp_server',
+  'set_environment_variables',
+  'make_api_request',
+  'oauth_request_access',
+  'navigate_ui',
+  'knowledge_base',
+  'generate_api_key',
+] as const
+
+export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)
+
+export const SUBAGENT_TOOL_NAMES = [
+  'debug',
+  'edit',
+  'build',
+  'plan',
+  'test',
+  'deploy',
+  'auth',
+  'research',
+  'knowledge',
+  'custom_tool',
+  'tour',
+  'info',
+  'workflow',
+  'evaluate',
+  'superagent',
+  'discovery',
+] as const
+
+export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)
+
+/**
+ * Respond tools are internal to the copilot's subagent system.
+ * They're used by subagents to signal completion and should NOT be executed by the sim side.
+ * The copilot backend handles these internally.
+ */
+export const RESPOND_TOOL_NAMES = [
+  'plan_respond',
+  'edit_respond',
+  'build_respond',
+  'debug_respond',
+  'info_respond',
+  'research_respond',
+  'deploy_respond',
+  'superagent_respond',
+  'discovery_respond',
+  'tour_respond',
+  'auth_respond',
+  'workflow_respond',
+  'knowledge_respond',
+  'custom_tool_respond',
+  'test_respond',
+] as const
+
+export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
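To show how the exported sets might be consumed, a small illustrative helper; the routing labels and the function itself are assumptions, not part of config.ts:

import {
  INTERRUPT_TOOL_SET,
  RESPOND_TOOL_SET,
  SUBAGENT_TOOL_SET,
} from '@/lib/copilot/orchestrator/config'

// Respond tools stay on the copilot backend, subagent tools open a nested stream,
// and interrupt tools wait for an explicit user decision before running.
function classifyTool(toolName: string): 'respond' | 'subagent' | 'interrupt' | 'regular' {
  if (RESPOND_TOOL_SET.has(toolName)) return 'respond'
  if (SUBAGENT_TOOL_SET.has(toolName)) return 'subagent'
  if (INTERRUPT_TOOL_SET.has(toolName)) return 'interrupt'
  return 'regular'
}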
@@ -1,12 +1,17 @@
 import { createLogger } from '@sim/logger'
 import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
+import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import {
   asRecord,
   getEventData,
   markToolResultSeen,
   wasToolResultSeen,
 } from '@/lib/copilot/orchestrator/sse-utils'
-import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
+import {
+  isIntegrationTool,
+  isToolAvailableOnSimSide,
+  markToolComplete,
+} from '@/lib/copilot/orchestrator/tool-executor'
 import type {
   ContentBlock,
   ExecutionContext,
@@ -17,6 +22,7 @@ import type {
 } from '@/lib/copilot/orchestrator/types'
 import {
   executeToolAndReport,
+  isInterruptToolName,
   waitForToolCompletion,
   waitForToolDecision,
 } from './tool-execution'
@@ -35,113 +41,6 @@ const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
   'run_block',
 ])
 
-function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
-  switch (String(state || '')) {
-    case 'generating':
-    case 'pending':
-    case 'awaiting_approval':
-      return 'pending'
-    case 'executing':
-      return 'executing'
-    case 'success':
-      return 'success'
-    case 'rejected':
-    case 'skipped':
-      return 'rejected'
-    case 'aborted':
-      return 'skipped'
-    case 'error':
-    case 'failed':
-      return 'error'
-    default:
-      return 'pending'
-  }
-}
-
-function getExecutionTarget(
-  toolData: Record<string, unknown>,
-  toolName: string
-): { target: string; capabilityId?: string } {
-  const execution = asRecord(toolData.execution)
-  if (typeof execution.target === 'string' && execution.target.length > 0) {
-    return {
-      target: execution.target,
-      capabilityId:
-        typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
-    }
-  }
-
-  // Fallback only when metadata is missing.
-  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
-    return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
-  }
-  return { target: 'sim_server' }
-}
-
-function needsApproval(toolData: Record<string, unknown>): boolean {
-  const ui = asRecord(toolData.ui)
-  return ui.showInterrupt === true
-}
-
-async function waitForClientCapabilityAndReport(
-  toolCall: ToolCallState,
-  options: OrchestratorOptions,
-  logScope: string
-): Promise<void> {
-  toolCall.status = 'executing'
-  const completion = await waitForToolCompletion(
-    toolCall.id,
-    options.timeout || STREAM_TIMEOUT_MS,
-    options.abortSignal
-  )
-
-  if (completion?.status === 'background') {
-    toolCall.status = 'skipped'
-    toolCall.endTime = Date.now()
-    markToolComplete(
-      toolCall.id,
-      toolCall.name,
-      202,
-      completion.message || 'Tool execution moved to background',
-      { background: true }
-    ).catch((err) => {
-      logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
-        toolCallId: toolCall.id,
-        error: err instanceof Error ? err.message : String(err),
-      })
-    })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  if (completion?.status === 'rejected') {
-    toolCall.status = 'rejected'
-    toolCall.endTime = Date.now()
-    markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
-      .catch((err) => {
-        logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
-          toolCallId: toolCall.id,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  const success = completion?.status === 'success'
-  toolCall.status = success ? 'success' : 'error'
-  toolCall.endTime = Date.now()
-  const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
-  markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
-    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
-      toolCallId: toolCall.id,
-      toolName: toolCall.name,
-      error: err instanceof Error ? err.message : String(err),
-    })
-  })
-  markToolResultSeen(toolCall.id)
-}
-
 // Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
 
 function inferToolSuccess(data: Record<string, unknown> | undefined): {
@@ -186,11 +85,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
 
     const { success, hasResultData, hasError } = inferToolSuccess(data)
 
-    current.status = data?.state
-      ? mapServerStateToToolStatus(data.state)
-      : success
-        ? 'success'
-        : 'error'
+    current.status = success ? 'success' : 'error'
     current.endTime = Date.now()
     if (hasResultData) {
      current.result = {
@@ -209,7 +104,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     if (!toolCallId) return
     const current = context.toolCalls.get(toolCallId)
     if (!current) return
-    current.status = data?.state ? mapServerStateToToolStatus(data.state) : 'error'
+    current.status = 'error'
     current.error = (data?.error as string | undefined) || 'Tool execution failed'
     current.endTime = Date.now()
   },
@@ -226,7 +121,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
       context.toolCalls.set(toolCallId, {
         id: toolCallId,
        name: toolName,
-        status: data?.state ? mapServerStateToToolStatus(data.state) : 'pending',
+        status: 'pending',
        startTime: Date.now(),
      })
    }
@@ -261,7 +156,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
      context.toolCalls.set(toolCallId, {
        id: toolCallId,
        name: toolName,
-        status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+        status: 'pending',
        params: args,
        startTime: Date.now(),
      })
@@ -275,29 +170,83 @@ export const sseHandlers: Record<string, SSEHandler> = {
      const toolCall = context.toolCalls.get(toolCallId)
      if (!toolCall) return
 
-      const execution = getExecutionTarget(toolData, toolName)
-      const isInteractive = options.interactive === true
-      const requiresApproval = isInteractive && needsApproval(toolData)
-      if (toolData.state) {
-        toolCall.status = mapServerStateToToolStatus(toolData.state)
+      // Subagent tools are executed by the copilot backend, not sim side.
+      if (SUBAGENT_TOOL_SET.has(toolName)) {
+        return
      }
 
-      if (requiresApproval) {
+      // Respond tools are internal to copilot's subagent system - skip execution.
+      // The copilot backend handles these internally to signal subagent completion.
+      if (RESPOND_TOOL_SET.has(toolName)) {
+        toolCall.status = 'success'
+        toolCall.endTime = Date.now()
+        toolCall.result = {
+          success: true,
+          output: 'Internal respond tool - handled by copilot backend',
+        }
+        return
+      }
+
+      const isInterruptTool = isInterruptToolName(toolName)
+      const isInteractive = options.interactive === true
+      // Integration tools (user-installed) also require approval in interactive mode
+      const needsApproval = isInterruptTool || isIntegrationTool(toolName)
+
+      if (needsApproval && isInteractive) {
        const decision = await waitForToolDecision(
          toolCallId,
          options.timeout || STREAM_TIMEOUT_MS,
          options.abortSignal
        )
        if (decision?.status === 'accepted' || decision?.status === 'success') {
-          if (execution.target === 'sim_client_capability' && isInteractive) {
-            await waitForClientCapabilityAndReport(toolCall, options, 'run tool')
+          // Client-executable run tools: defer execution to the browser client.
+          // The client calls executeWorkflowWithFullLogging for real-time feedback
+          // (block pulsing, logs, stop button) and reports completion via
+          // /api/copilot/confirm with status success/error. We poll Redis for
+          // that completion signal, then fire-and-forget markToolComplete to Go.
+          if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+            toolCall.status = 'executing'
+            const completion = await waitForToolCompletion(
+              toolCallId,
+              options.timeout || STREAM_TIMEOUT_MS,
+              options.abortSignal
+            )
+            if (completion?.status === 'background') {
+              toolCall.status = 'skipped'
+              toolCall.endTime = Date.now()
+              markToolComplete(
+                toolCall.id,
+                toolCall.name,
+                202,
+                completion.message || 'Tool execution moved to background',
+                { background: true }
+              ).catch((err) => {
+                logger.error('markToolComplete fire-and-forget failed (run tool background)', {
+                  toolCallId: toolCall.id,
+                  error: err instanceof Error ? err.message : String(err),
+                })
+              })
+              markToolResultSeen(toolCallId)
+              return
+            }
+            const success = completion?.status === 'success'
+            toolCall.status = success ? 'success' : 'error'
+            toolCall.endTime = Date.now()
+            const msg =
+              completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+            // Fire-and-forget: tell Go backend the tool is done
+            // (must NOT await — see deadlock note in executeToolAndReport)
+            markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+              logger.error('markToolComplete fire-and-forget failed (run tool)', {
+                toolCallId: toolCall.id,
+                toolName: toolCall.name,
+                error: err instanceof Error ? err.message : String(err),
+              })
+            })
+            markToolResultSeen(toolCallId)
            return
          }
-          if (execution.target === 'sim_server' || execution.target === 'sim_client_capability') {
-            if (options.autoExecuteTools !== false) {
-              await executeToolAndReport(toolCallId, context, execContext, options)
-            }
-          }
+          await executeToolAndReport(toolCallId, context, execContext, options)
          return
        }
 
@@ -359,15 +308,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
        return
      }
 
-      if (execution.target === 'sim_client_capability' && isInteractive) {
-        await waitForClientCapabilityAndReport(toolCall, options, 'run tool')
-        return
-      }
-
-      if (
-        (execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
-        options.autoExecuteTools !== false
-      ) {
+      if (options.autoExecuteTools !== false) {
        await executeToolAndReport(toolCallId, context, execContext, options)
      }
    },
@@ -469,7 +410,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
      const toolCall: ToolCallState = {
        id: toolCallId,
        name: toolName,
-        status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+        status: 'pending',
        params: args,
        startTime: Date.now(),
      }
@@ -487,26 +428,37 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
 
      if (isPartial) return
 
-      const execution = getExecutionTarget(toolData, toolName)
-      const isInteractive = options.interactive === true
-      const requiresApproval = isInteractive && needsApproval(toolData)
+      // Respond tools are internal to copilot's subagent system - skip execution.
+      if (RESPOND_TOOL_SET.has(toolName)) {
+        toolCall.status = 'success'
+        toolCall.endTime = Date.now()
+        toolCall.result = {
+          success: true,
+          output: 'Internal respond tool - handled by copilot backend',
+        }
+        return
+      }
 
-      if (requiresApproval) {
+      // Tools that only exist on the Go backend (e.g. search_patterns,
+      // search_errors, remember_debug) should NOT be re-executed on the Sim side.
+      // The Go backend already executed them and will send its own tool_result
+      // SSE event with the real outcome. Trying to execute them here would fail
+      // with "Tool not found" and incorrectly mark the tool as failed.
+      if (!isToolAvailableOnSimSide(toolName)) {
+        return
+      }
+
+      // Interrupt tools and integration tools (user-installed) require approval
+      // in interactive mode, same as top-level handler.
+      const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
+      if (options.interactive === true && needsSubagentApproval) {
        const decision = await waitForToolDecision(
          toolCallId,
          options.timeout || STREAM_TIMEOUT_MS,
          options.abortSignal
        )
        if (decision?.status === 'accepted' || decision?.status === 'success') {
-          if (execution.target === 'sim_client_capability' && isInteractive) {
-            await waitForClientCapabilityAndReport(toolCall, options, 'subagent run tool')
-            return
-          }
-          if (execution.target === 'sim_server' || execution.target === 'sim_client_capability') {
-            if (options.autoExecuteTools !== false) {
-              await executeToolAndReport(toolCallId, context, execContext, options)
-            }
-          }
+          await executeToolAndReport(toolCallId, context, execContext, options)
          return
        }
        if (decision?.status === 'rejected' || decision?.status === 'error') {
@@ -565,15 +517,66 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
          return
        }
 
-      if (execution.target === 'sim_client_capability' && isInteractive) {
-        await waitForClientCapabilityAndReport(toolCall, options, 'subagent run tool')
+      // Client-executable run tools in interactive mode: defer to client.
+      // Same pattern as main handler: wait for client completion, then tell Go.
+      if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+        toolCall.status = 'executing'
+        const completion = await waitForToolCompletion(
+          toolCallId,
+          options.timeout || STREAM_TIMEOUT_MS,
+          options.abortSignal
+        )
+        if (completion?.status === 'rejected') {
+          toolCall.status = 'rejected'
+          toolCall.endTime = Date.now()
+          markToolComplete(
+            toolCall.id,
+            toolCall.name,
+            400,
+            completion.message || 'Tool execution rejected'
+          ).catch((err) => {
+            logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
+              toolCallId: toolCall.id,
+              error: err instanceof Error ? err.message : String(err),
+            })
+          })
+          markToolResultSeen(toolCallId)
+          return
+        }
+        if (completion?.status === 'background') {
+          toolCall.status = 'skipped'
+          toolCall.endTime = Date.now()
+          markToolComplete(
+            toolCall.id,
+            toolCall.name,
+            202,
+            completion.message || 'Tool execution moved to background',
+            { background: true }
+          ).catch((err) => {
+            logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
+              toolCallId: toolCall.id,
+              error: err instanceof Error ? err.message : String(err),
+            })
+          })
+          markToolResultSeen(toolCallId)
+          return
+        }
+        const success = completion?.status === 'success'
+        toolCall.status = success ? 'success' : 'error'
+        toolCall.endTime = Date.now()
+        const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+        markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
+            toolCallId: toolCall.id,
+            toolName: toolCall.name,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCallId)
        return
      }
 
-      if (
-        (execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
-        options.autoExecuteTools !== false
-      ) {
+      if (options.autoExecuteTools !== false) {
        await executeToolAndReport(toolCallId, context, execContext, options)
      }
    },
@@ -593,7 +596,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
 
      const { success, hasResultData, hasError } = inferToolSuccess(data)
 
-      const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
+      const status = success ? 'success' : 'error'
      const endTime = Date.now()
      const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
 
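Reviewer note: the approve-then-execute flow above is easier to follow in isolation. A minimal sketch of the pattern, assuming simplified stand-ins for waitForToolDecision, executeToolAndReport, and markToolComplete (the wrapper function and parameter names below are hypothetical, not part of the diff):

// Sketch only: gate execution on a user decision, then report completion
// fire-and-forget so the SSE loop never blocks on the backend acknowledging.
type Decision = { status: 'accepted' | 'rejected' | 'success' | 'error' }

async function handleInterruptTool(
  toolCallId: string,
  decide: (id: string) => Promise<Decision | null>,
  execute: (id: string) => Promise<void>,
  report: (id: string, code: number, message: string) => Promise<void>
): Promise<void> {
  const decision = await decide(toolCallId)
  if (decision?.status === 'accepted' || decision?.status === 'success') {
    await execute(toolCallId)
    return
  }
  // Rejection is reported without awaiting; errors are only logged.
  report(toolCallId, 400, 'Tool execution rejected').catch(() => {
    /* logged by the real implementation */
  })
}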
@@ -4,6 +4,7 @@ import {
   TOOL_DECISION_MAX_POLL_MS,
   TOOL_DECISION_POLL_BACKOFF,
 } from '@/lib/copilot/constants'
+import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
 import {
   asRecord,
@@ -20,6 +21,10 @@ import type {
 
 const logger = createLogger('CopilotSseToolExecution')
 
+export function isInterruptToolName(toolName: string): boolean {
+  return INTERRUPT_TOOL_SET.has(toolName)
+}
+
 export async function executeToolAndReport(
   toolCallId: string,
   context: StreamingContext,
@@ -29,11 +34,9 @@ export async function executeToolAndReport(
   const toolCall = context.toolCalls.get(toolCallId)
   if (!toolCall) return
 
-  const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
-  if (lockable.__simExecuting) return
+  if (toolCall.status === 'executing') return
   if (wasToolResultSeen(toolCall.id)) return
 
-  lockable.__simExecuting = true
   toolCall.status = 'executing'
   try {
     const result = await executeToolServerSide(toolCall, execContext)
@@ -119,8 +122,6 @@ export async function executeToolAndReport(
       },
     }
     await options?.onEvent?.(errorEvent)
-  } finally {
-    delete lockable.__simExecuting
   }
 }
 
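Reviewer note: the lock removal above relies on the status field doubling as a re-entrancy guard. A minimal sketch of that idea, assuming the status is set synchronously before the first await (the helper below is hypothetical):

// Sketch only: 'executing' acts as the guard that the __simExecuting flag used to provide.
type GuardedToolCall = { id: string; status: 'pending' | 'executing' | 'success' | 'error' }

async function runOnce(
  toolCall: GuardedToolCall,
  run: (id: string) => Promise<void>
): Promise<void> {
  if (toolCall.status === 'executing') return // a second event for the same call bails out
  toolCall.status = 'executing' // set before any await so duplicates are rejected
  try {
    await run(toolCall.id)
    toolCall.status = 'success'
  } catch {
    toolCall.status = 'error'
  }
}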
@@ -325,10 +325,6 @@ const SERVER_TOOLS = new Set<string>([
   'get_block_config',
   'get_trigger_blocks',
   'edit_workflow',
-  'workflow_context_get',
-  'workflow_context_expand',
-  'workflow_change',
-  'workflow_verify',
   'get_workflow_console',
   'search_documentation',
   'search_online',
@@ -609,83 +609,6 @@ const META_edit_workflow: ToolMetadata = {
   },
 }
 
-const META_workflow_change: ToolMetadata = {
-  displayNames: {
-    [ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
-    [ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
-    [ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
-    [ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
-    [ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
-    [ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
-    [ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
-    [ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
-  },
-  getDynamicText: (params, state) => {
-    const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-    if (mode === 'dry_run') {
-      switch (state) {
-        case ClientToolCallState.success:
-          return 'Planned workflow changes'
-        case ClientToolCallState.executing:
-        case ClientToolCallState.generating:
-        case ClientToolCallState.pending:
-          return 'Planning workflow changes'
-      }
-    }
-    if (mode === 'apply' || typeof params?.proposalId === 'string') {
-      switch (state) {
-        case ClientToolCallState.success:
-          return 'Applied workflow changes'
-        case ClientToolCallState.executing:
-        case ClientToolCallState.generating:
-        case ClientToolCallState.pending:
-          return 'Applying workflow changes'
-      }
-    }
-    return undefined
-  },
-  uiConfig: {
-    isSpecial: true,
-    customRenderer: 'edit_summary',
-  },
-}
-
-const META_workflow_context_get: ToolMetadata = {
-  displayNames: {
-    [ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
-    [ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
-    [ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
-    [ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
-    [ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
-    [ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
-    [ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
-  },
-}
-
-const META_workflow_context_expand: ToolMetadata = {
-  displayNames: {
-    [ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
-    [ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
-    [ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
-    [ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
-    [ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
-    [ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
-    [ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
-  },
-}
-
-const META_workflow_verify: ToolMetadata = {
-  displayNames: {
-    [ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
-    [ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
-    [ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
-    [ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
-    [ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
-    [ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
-    [ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
-  },
-}
-
 const META_evaluate: ToolMetadata = {
   displayNames: {
     [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
@@ -2619,10 +2542,6 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
   deploy_mcp: META_deploy_mcp,
   edit: META_edit,
   edit_workflow: META_edit_workflow,
-  workflow_context_get: META_workflow_context_get,
-  workflow_context_expand: META_workflow_context_expand,
-  workflow_change: META_workflow_change,
-  workflow_verify: META_workflow_verify,
   evaluate: META_evaluate,
   get_block_config: META_get_block_config,
   get_block_options: META_get_block_options,
@@ -13,12 +13,6 @@ import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
 import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
 import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
 import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
-import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
-import {
-  workflowContextExpandServerTool,
-  workflowContextGetServerTool,
-} from '@/lib/copilot/tools/server/workflow/workflow-context'
-import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
 import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
 
 export { ExecuteResponseSuccessSchema }
@@ -41,10 +35,6 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
   [getCredentialsServerTool.name]: getCredentialsServerTool,
   [makeApiRequestServerTool.name]: makeApiRequestServerTool,
   [knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
-  [workflowContextGetServerTool.name]: workflowContextGetServerTool,
-  [workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
-  [workflowChangeServerTool.name]: workflowChangeServerTool,
-  [workflowVerifyServerTool.name]: workflowVerifyServerTool,
 }
 
 /**
@@ -1,93 +0,0 @@
-import crypto from 'crypto'
-
-type StoreEntry<T> = {
-  value: T
-  expiresAt: number
-}
-
-const DEFAULT_TTL_MS = 30 * 60 * 1000
-const MAX_ENTRIES = 500
-
-class TTLStore<T> {
-  private readonly data = new Map<string, StoreEntry<T>>()
-
-  constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}
-
-  set(value: T): string {
-    this.gc()
-    if (this.data.size >= MAX_ENTRIES) {
-      const firstKey = this.data.keys().next().value as string | undefined
-      if (firstKey) {
-        this.data.delete(firstKey)
-      }
-    }
-    const id = crypto.randomUUID()
-    this.data.set(id, {
-      value,
-      expiresAt: Date.now() + this.ttlMs,
-    })
-    return id
-  }
-
-  get(id: string): T | null {
-    const entry = this.data.get(id)
-    if (!entry) return null
-    if (entry.expiresAt <= Date.now()) {
-      this.data.delete(id)
-      return null
-    }
-    return entry.value
-  }
-
-  private gc(): void {
-    const now = Date.now()
-    for (const [key, entry] of this.data.entries()) {
-      if (entry.expiresAt <= now) {
-        this.data.delete(key)
-      }
-    }
-  }
-}
-
-export type WorkflowContextPack = {
-  workflowId: string
-  snapshotHash: string
-  workflowState: {
-    blocks: Record<string, any>
-    edges: Array<Record<string, any>>
-    loops: Record<string, any>
-    parallels: Record<string, any>
-  }
-  schemasByType: Record<string, any>
-  schemaRefsByType: Record<string, string>
-  summary: Record<string, any>
-}
-
-export type WorkflowChangeProposal = {
-  workflowId: string
-  baseSnapshotHash: string
-  compiledOperations: Array<Record<string, any>>
-  diffSummary: Record<string, any>
-  warnings: string[]
-  diagnostics: string[]
-  touchedBlocks: string[]
-}
-
-const contextPackStore = new TTLStore<WorkflowContextPack>()
-const proposalStore = new TTLStore<WorkflowChangeProposal>()
-
-export function saveContextPack(pack: WorkflowContextPack): string {
-  return contextPackStore.set(pack)
-}
-
-export function getContextPack(id: string): WorkflowContextPack | null {
-  return contextPackStore.get(id)
-}
-
-export function saveProposal(proposal: WorkflowChangeProposal): string {
-  return proposalStore.set(proposal)
-}
-
-export function getProposal(id: string): WorkflowChangeProposal | null {
-  return proposalStore.get(id)
-}
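Reviewer note: for context, the removed store above was consumed roughly as follows (a sketch only, using the deleted module's exports; the caller shown here is hypothetical). Entries expire after the 30-minute TTL and the oldest entry is evicted once MAX_ENTRIES is reached, so lookups must tolerate null.

// Sketch only: how a dry_run proposal was stashed and later fetched for apply.
import { getProposal, saveProposal, type WorkflowChangeProposal } from './change-store'

function planThenApply(proposal: WorkflowChangeProposal): WorkflowChangeProposal | null {
  const proposalId = saveProposal(proposal) // dry_run stores the compiled operations
  // ...later, the apply call looks the proposal up again by id; null means expired/evicted
  return getProposal(proposalId)
}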
@@ -1,987 +0,0 @@
-import crypto from 'crypto'
-import { createLogger } from '@sim/logger'
-import { z } from 'zod'
-import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
-import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
-import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
-import { getBlock } from '@/blocks/registry'
-import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
-import {
-  getContextPack,
-  getProposal,
-  saveProposal,
-  type WorkflowChangeProposal,
-} from './change-store'
-import { editWorkflowServerTool } from './edit-workflow'
-import { applyOperationsToWorkflowState } from './edit-workflow/engine'
-import { preValidateCredentialInputs } from './edit-workflow/validation'
-import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'
-
-const logger = createLogger('WorkflowChangeServerTool')
-
-const TargetSchema = z
-  .object({
-    blockId: z.string().optional(),
-    alias: z.string().optional(),
-    match: z
-      .object({
-        type: z.string().optional(),
-        name: z.string().optional(),
-      })
-      .optional(),
-  })
-  .strict()
-
-const CredentialSelectionSchema = z
-  .object({
-    strategy: z.enum(['first_connected', 'by_id', 'by_name']).optional(),
-    id: z.string().optional(),
-    name: z.string().optional(),
-  })
-  .strict()
-
-const ChangeOperationSchema = z
-  .object({
-    op: z.enum(['set', 'unset', 'merge', 'append', 'remove', 'attach_credential']),
-    path: z.string().optional(),
-    value: z.any().optional(),
-    provider: z.string().optional(),
-    selection: CredentialSelectionSchema.optional(),
-    required: z.boolean().optional(),
-  })
-  .strict()
-
-const MutationSchema = z
-  .object({
-    action: z.enum([
-      'ensure_block',
-      'patch_block',
-      'remove_block',
-      'connect',
-      'disconnect',
-      'ensure_variable',
-      'set_variable',
-    ]),
-    target: TargetSchema.optional(),
-    type: z.string().optional(),
-    name: z.string().optional(),
-    inputs: z.record(z.any()).optional(),
-    triggerMode: z.boolean().optional(),
-    advancedMode: z.boolean().optional(),
-    enabled: z.boolean().optional(),
-    changes: z.array(ChangeOperationSchema).optional(),
-    from: TargetSchema.optional(),
-    to: TargetSchema.optional(),
-    handle: z.string().optional(),
-    toHandle: z.string().optional(),
-    mode: z.enum(['set', 'append', 'remove']).optional(),
-  })
-  .strict()
-
-const LinkEndpointSchema = z
-  .object({
-    blockId: z.string().optional(),
-    alias: z.string().optional(),
-    match: z
-      .object({
-        type: z.string().optional(),
-        name: z.string().optional(),
-      })
-      .optional(),
-    handle: z.string().optional(),
-  })
-  .strict()
-
-const LinkSchema = z
-  .object({
-    from: LinkEndpointSchema,
-    to: LinkEndpointSchema,
-    mode: z.enum(['set', 'append', 'remove']).optional(),
-  })
-  .strict()
-
-const ChangeSpecSchema = z
-  .object({
-    objective: z.string().optional(),
-    constraints: z.record(z.any()).optional(),
-    resources: z.record(z.any()).optional(),
-    mutations: z.array(MutationSchema).optional(),
-    links: z.array(LinkSchema).optional(),
-    acceptance: z.array(z.any()).optional(),
-  })
-  .strict()
-
-const WorkflowChangeInputSchema = z
-  .object({
-    mode: z.enum(['dry_run', 'apply']),
-    workflowId: z.string().optional(),
-    contextPackId: z.string().optional(),
-    proposalId: z.string().optional(),
-    baseSnapshotHash: z.string().optional(),
-    expectedSnapshotHash: z.string().optional(),
-    changeSpec: ChangeSpecSchema.optional(),
-  })
-  .strict()
-
-type WorkflowChangeParams = z.input<typeof WorkflowChangeInputSchema>
-type ChangeSpec = z.input<typeof ChangeSpecSchema>
-type TargetRef = z.input<typeof TargetSchema>
-type ChangeOperation = z.input<typeof ChangeOperationSchema>
|
|
||||||
type CredentialRecord = {
|
|
||||||
id: string
|
|
||||||
name: string
|
|
||||||
provider: string
|
|
||||||
isDefault?: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConnectionTarget = {
|
|
||||||
block: string
|
|
||||||
handle?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
type ConnectionState = Map<string, Map<string, ConnectionTarget[]>>
|
|
||||||
|
|
||||||
function createDraftBlockId(seed?: string): string {
|
|
||||||
const suffix = crypto.randomUUID().slice(0, 8)
|
|
||||||
const base = seed ? seed.replace(/[^a-zA-Z0-9]/g, '').slice(0, 24) : 'draft'
|
|
||||||
return `${base || 'draft'}_${suffix}`
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizeHandle(handle?: string): string {
|
|
||||||
if (!handle) return 'source'
|
|
||||||
if (handle === 'success') return 'source'
|
|
||||||
return handle
|
|
||||||
}
|
|
||||||
|
|
||||||
function deepClone<T>(value: T): T {
|
|
||||||
return JSON.parse(JSON.stringify(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
function stableUnique(values: string[]): string[] {
|
|
||||||
return [...new Set(values.filter(Boolean))]
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildConnectionState(workflowState: {
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
}): ConnectionState {
|
|
||||||
const state: ConnectionState = new Map()
|
|
||||||
for (const edge of workflowState.edges || []) {
|
|
||||||
const source = String(edge.source || '')
|
|
||||||
const target = String(edge.target || '')
|
|
||||||
if (!source || !target) continue
|
|
||||||
const sourceHandle = normalizeHandle(String(edge.sourceHandle || 'source'))
|
|
||||||
const targetHandle = edge.targetHandle ? String(edge.targetHandle) : undefined
|
|
||||||
|
|
||||||
let handleMap = state.get(source)
|
|
||||||
if (!handleMap) {
|
|
||||||
handleMap = new Map()
|
|
||||||
state.set(source, handleMap)
|
|
||||||
}
|
|
||||||
const existing = handleMap.get(sourceHandle) || []
|
|
||||||
existing.push({ block: target, handle: targetHandle })
|
|
||||||
handleMap.set(sourceHandle, existing)
|
|
||||||
}
|
|
||||||
return state
|
|
||||||
}
|
|
||||||
|
|
||||||
function connectionStateToPayload(state: Map<string, ConnectionTarget[]>): Record<string, any> {
|
|
||||||
const payload: Record<string, any> = {}
|
|
||||||
for (const [handle, targets] of state.entries()) {
|
|
||||||
if (!targets || targets.length === 0) continue
|
|
||||||
const normalizedTargets = targets.map((target) => {
|
|
||||||
if (!target.handle || target.handle === 'target') {
|
|
||||||
return target.block
|
|
||||||
}
|
|
||||||
return { block: target.block, handle: target.handle }
|
|
||||||
})
|
|
||||||
payload[handle] = normalizedTargets.length === 1 ? normalizedTargets[0] : normalizedTargets
|
|
||||||
}
|
|
||||||
return payload
|
|
||||||
}
|
|
||||||
|
|
||||||
function findMatchingBlockId(
|
|
||||||
workflowState: { blocks: Record<string, any> },
|
|
||||||
target: TargetRef
|
|
||||||
): string | null {
|
|
||||||
if (target.blockId && workflowState.blocks[target.blockId]) {
|
|
||||||
return target.blockId
|
|
||||||
}
|
|
||||||
|
|
||||||
if (target.match) {
|
|
||||||
const type = target.match.type
|
|
||||||
const name = target.match.name?.toLowerCase()
|
|
||||||
const matches = Object.entries(workflowState.blocks || {}).filter(([_, block]) => {
|
|
||||||
const blockType = String((block as Record<string, unknown>).type || '')
|
|
||||||
const blockName = String((block as Record<string, unknown>).name || '').toLowerCase()
|
|
||||||
const typeOk = type ? blockType === type : true
|
|
||||||
const nameOk = name ? blockName === name : true
|
|
||||||
return typeOk && nameOk
|
|
||||||
})
|
|
||||||
if (matches.length === 1) {
|
|
||||||
return matches[0][0]
|
|
||||||
}
|
|
||||||
if (matches.length > 1) {
|
|
||||||
throw new Error(
|
|
||||||
`ambiguous_target: target match resolved to ${matches.length} blocks (${matches.map(([id]) => id).join(', ')})`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
function getNestedValue(value: any, path: string[]): any {
|
|
||||||
let cursor = value
|
|
||||||
for (const segment of path) {
|
|
||||||
if (cursor == null || typeof cursor !== 'object') return undefined
|
|
||||||
cursor = cursor[segment]
|
|
||||||
}
|
|
||||||
return cursor
|
|
||||||
}
|
|
||||||
|
|
||||||
function setNestedValue(base: any, path: string[], nextValue: any): any {
|
|
||||||
if (path.length === 0) return nextValue
|
|
||||||
const out = Array.isArray(base) ? [...base] : { ...(base || {}) }
|
|
||||||
let cursor: any = out
|
|
||||||
for (let i = 0; i < path.length - 1; i++) {
|
|
||||||
const key = path[i]
|
|
||||||
const current = cursor[key]
|
|
||||||
cursor[key] =
|
|
||||||
current && typeof current === 'object'
|
|
||||||
? Array.isArray(current)
|
|
||||||
? [...current]
|
|
||||||
: { ...current }
|
|
||||||
: {}
|
|
||||||
cursor = cursor[key]
|
|
||||||
}
|
|
||||||
cursor[path[path.length - 1]] = nextValue
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
|
|
||||||
function removeArrayItem(arr: unknown[], value: unknown): unknown[] {
|
|
||||||
return arr.filter((item) => JSON.stringify(item) !== JSON.stringify(value))
|
|
||||||
}
|
|
||||||
|
|
||||||
function selectCredentialId(
|
|
||||||
availableCredentials: CredentialRecord[],
|
|
||||||
provider: string,
|
|
||||||
selection: z.infer<typeof CredentialSelectionSchema> | undefined
|
|
||||||
): string | null {
|
|
||||||
const providerLower = provider.toLowerCase()
|
|
||||||
const providerMatches = availableCredentials.filter((credential) => {
|
|
||||||
const credentialProvider = credential.provider.toLowerCase()
|
|
||||||
return (
|
|
||||||
credentialProvider === providerLower || credentialProvider.startsWith(`${providerLower}-`)
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
const pool = providerMatches.length > 0 ? providerMatches : availableCredentials
|
|
||||||
const strategy = selection?.strategy || 'first_connected'
|
|
||||||
|
|
||||||
if (strategy === 'by_id') {
|
|
||||||
const id = selection?.id
|
|
||||||
if (!id) return null
|
|
||||||
return pool.find((credential) => credential.id === id)?.id || null
|
|
||||||
}
|
|
||||||
|
|
||||||
if (strategy === 'by_name') {
|
|
||||||
const name = selection?.name?.toLowerCase()
|
|
||||||
if (!name) return null
|
|
||||||
const exact = pool.find((credential) => credential.name.toLowerCase() === name)
|
|
||||||
if (exact) return exact.id
|
|
||||||
const partial = pool.find((credential) => credential.name.toLowerCase().includes(name))
|
|
||||||
return partial?.id || null
|
|
||||||
}
|
|
||||||
|
|
||||||
const defaultCredential = pool.find((credential) => credential.isDefault)
|
|
||||||
if (defaultCredential) return defaultCredential.id
|
|
||||||
return pool[0]?.id || null
|
|
||||||
}
|
|
||||||
|
|
||||||
function selectCredentialFieldId(blockType: string, provider: string): string | null {
|
|
||||||
const blockConfig = getBlock(blockType)
|
|
||||||
if (!blockConfig) return null
|
|
||||||
|
|
||||||
const oauthFields = (blockConfig.subBlocks || []).filter(
|
|
||||||
(subBlock) => subBlock.type === 'oauth-input'
|
|
||||||
)
|
|
||||||
if (oauthFields.length === 0) return null
|
|
||||||
|
|
||||||
const providerKey = provider.replace(/[^a-zA-Z0-9]/g, '').toLowerCase()
|
|
||||||
const fieldMatch = oauthFields.find((subBlock) =>
|
|
||||||
subBlock.id
|
|
||||||
.replace(/[^a-zA-Z0-9]/g, '')
|
|
||||||
.toLowerCase()
|
|
||||||
.includes(providerKey)
|
|
||||||
)
|
|
||||||
if (fieldMatch) return fieldMatch.id
|
|
||||||
return oauthFields[0].id
|
|
||||||
}
|
|
||||||
|
|
||||||
function ensureConnectionTarget(
|
|
||||||
existing: ConnectionTarget[],
|
|
||||||
target: ConnectionTarget,
|
|
||||||
mode: 'set' | 'append' | 'remove'
|
|
||||||
): ConnectionTarget[] {
|
|
||||||
if (mode === 'set') {
|
|
||||||
return [target]
|
|
||||||
}
|
|
||||||
|
|
||||||
if (mode === 'remove') {
|
|
||||||
return existing.filter(
|
|
||||||
(item) =>
|
|
||||||
!(item.block === target.block && (item.handle || 'target') === (target.handle || 'target'))
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const duplicate = existing.some(
|
|
||||||
(item) =>
|
|
||||||
item.block === target.block && (item.handle || 'target') === (target.handle || 'target')
|
|
||||||
)
|
|
||||||
if (duplicate) return existing
|
|
||||||
return [...existing, target]
|
|
||||||
}
|
|
||||||
|
|
||||||
async function compileChangeSpec(params: {
|
|
||||||
changeSpec: ChangeSpec
|
|
||||||
workflowState: {
|
|
||||||
blocks: Record<string, any>
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
loops: Record<string, any>
|
|
||||||
parallels: Record<string, any>
|
|
||||||
}
|
|
||||||
userId: string
|
|
||||||
workflowId: string
|
|
||||||
}): Promise<{
|
|
||||||
operations: Array<Record<string, any>>
|
|
||||||
warnings: string[]
|
|
||||||
diagnostics: string[]
|
|
||||||
touchedBlocks: string[]
|
|
||||||
}> {
|
|
||||||
const { changeSpec, workflowState, userId, workflowId } = params
|
|
||||||
const operations: Array<Record<string, any>> = []
|
|
||||||
const diagnostics: string[] = []
|
|
||||||
const warnings: string[] = []
|
|
||||||
const touchedBlocks = new Set<string>()
|
|
||||||
|
|
||||||
const aliasMap = new Map<string, string>()
|
|
||||||
const workingState = deepClone(workflowState)
|
|
||||||
const connectionState = buildConnectionState(workingState)
|
|
||||||
const connectionTouchedSources = new Set<string>()
|
|
||||||
const plannedBlockTypes = new Map<string, string>()
|
|
||||||
|
|
||||||
// Seed aliases from existing block names.
|
|
||||||
for (const [blockId, block] of Object.entries(workingState.blocks || {})) {
|
|
||||||
const blockName = String((block as Record<string, unknown>).name || '')
|
|
||||||
if (!blockName) continue
|
|
||||||
const normalizedAlias = blockName.replace(/[^a-zA-Z0-9]/g, '')
|
|
||||||
if (normalizedAlias && !aliasMap.has(normalizedAlias)) {
|
|
||||||
aliasMap.set(normalizedAlias, blockId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const credentialsResponse = await getCredentialsServerTool.execute({ workflowId }, { userId })
|
|
||||||
const availableCredentials: CredentialRecord[] =
|
|
||||||
credentialsResponse?.oauth?.connected?.credentials?.map((credential: any) => ({
|
|
||||||
id: String(credential.id || ''),
|
|
||||||
name: String(credential.name || ''),
|
|
||||||
provider: String(credential.provider || ''),
|
|
||||||
isDefault: Boolean(credential.isDefault),
|
|
||||||
})) || []
|
|
||||||
|
|
||||||
const resolveTarget = (
|
|
||||||
target: TargetRef | undefined,
|
|
||||||
allowCreateAlias = false
|
|
||||||
): string | null => {
|
|
||||||
if (!target) return null
|
|
||||||
if (target.blockId) {
|
|
||||||
if (workingState.blocks[target.blockId] || plannedBlockTypes.has(target.blockId)) {
|
|
||||||
return target.blockId
|
|
||||||
}
|
|
||||||
return allowCreateAlias ? target.blockId : null
|
|
||||||
}
|
|
||||||
|
|
||||||
if (target.alias) {
|
|
||||||
if (aliasMap.has(target.alias)) return aliasMap.get(target.alias) || null
|
|
||||||
const byMatch = findMatchingBlockId(workingState, { alias: target.alias })
|
|
||||||
if (byMatch) {
|
|
||||||
aliasMap.set(target.alias, byMatch)
|
|
||||||
return byMatch
|
|
||||||
}
|
|
||||||
return allowCreateAlias ? target.alias : null
|
|
||||||
}
|
|
||||||
|
|
||||||
const matched = findMatchingBlockId(workingState, target)
|
|
||||||
if (matched) return matched
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
const applyPatchChange = (
|
|
||||||
targetId: string,
|
|
||||||
blockType: string | null,
|
|
||||||
change: ChangeOperation,
|
|
||||||
paramsOut: Record<string, any>
|
|
||||||
): void => {
|
|
||||||
if (change.op === 'attach_credential') {
|
|
||||||
const provider = change.provider
|
|
||||||
if (!provider) {
|
|
||||||
diagnostics.push(`attach_credential on ${targetId} is missing provider`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if (!blockType) {
|
|
||||||
diagnostics.push(`attach_credential on ${targetId} failed: unknown block type`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const credentialFieldId = selectCredentialFieldId(blockType, provider)
|
|
||||||
if (!credentialFieldId) {
|
|
||||||
const msg = `No oauth input field found for block type "${blockType}" on ${targetId}`
|
|
||||||
if (change.required) diagnostics.push(msg)
|
|
||||||
else warnings.push(msg)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const credentialId = selectCredentialId(availableCredentials, provider, change.selection)
|
|
||||||
if (!credentialId) {
|
|
||||||
const msg = `No credential found for provider "${provider}" on ${targetId}`
|
|
||||||
if (change.required) diagnostics.push(msg)
|
|
||||||
else warnings.push(msg)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
paramsOut.inputs = paramsOut.inputs || {}
|
|
||||||
paramsOut.inputs[credentialFieldId] = credentialId
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!change.path) {
|
|
||||||
diagnostics.push(`${change.op} on ${targetId} requires a path`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const pathSegments = change.path.split('.').filter(Boolean)
|
|
||||||
if (pathSegments.length === 0) {
|
|
||||||
diagnostics.push(`${change.op} on ${targetId} has an invalid path "${change.path}"`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathSegments[0] === 'inputs') {
|
|
||||||
const inputKey = pathSegments[1]
|
|
||||||
if (!inputKey) {
|
|
||||||
diagnostics.push(`${change.op} on ${targetId} has invalid input path "${change.path}"`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const currentInputValue =
|
|
||||||
paramsOut.inputs?.[inputKey] ??
|
|
||||||
workingState.blocks[targetId]?.subBlocks?.[inputKey]?.value ??
|
|
||||||
null
|
|
||||||
|
|
||||||
let nextInputValue = currentInputValue
|
|
||||||
const nestedPath = pathSegments.slice(2)
|
|
||||||
|
|
||||||
if (change.op === 'set') {
|
|
||||||
nextInputValue =
|
|
||||||
nestedPath.length > 0
|
|
||||||
? setNestedValue(currentInputValue ?? {}, nestedPath, change.value)
|
|
||||||
: change.value
|
|
||||||
} else if (change.op === 'unset') {
|
|
||||||
nextInputValue =
|
|
||||||
nestedPath.length > 0 ? setNestedValue(currentInputValue ?? {}, nestedPath, null) : null
|
|
||||||
} else if (change.op === 'merge') {
|
|
||||||
if (nestedPath.length > 0) {
|
|
||||||
const baseObject = getNestedValue(currentInputValue ?? {}, nestedPath) || {}
|
|
||||||
if (
|
|
||||||
baseObject &&
|
|
||||||
typeof baseObject === 'object' &&
|
|
||||||
change.value &&
|
|
||||||
typeof change.value === 'object'
|
|
||||||
) {
|
|
||||||
nextInputValue = setNestedValue(currentInputValue ?? {}, nestedPath, {
|
|
||||||
...baseObject,
|
|
||||||
...(change.value as Record<string, unknown>),
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
diagnostics.push(`merge on ${targetId} at "${change.path}" requires object values`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
currentInputValue &&
|
|
||||||
typeof currentInputValue === 'object' &&
|
|
||||||
!Array.isArray(currentInputValue) &&
|
|
||||||
change.value &&
|
|
||||||
typeof change.value === 'object' &&
|
|
||||||
!Array.isArray(change.value)
|
|
||||||
) {
|
|
||||||
nextInputValue = { ...currentInputValue, ...(change.value as Record<string, unknown>) }
|
|
||||||
} else if (currentInputValue == null && change.value && typeof change.value === 'object') {
|
|
||||||
nextInputValue = change.value
|
|
||||||
} else {
|
|
||||||
diagnostics.push(`merge on ${targetId} at "${change.path}" requires object values`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
} else if (change.op === 'append') {
|
|
||||||
const arr = Array.isArray(currentInputValue) ? [...currentInputValue] : []
|
|
||||||
arr.push(change.value)
|
|
||||||
nextInputValue = arr
|
|
||||||
} else if (change.op === 'remove') {
|
|
||||||
if (!Array.isArray(currentInputValue)) {
|
|
||||||
diagnostics.push(`remove on ${targetId} at "${change.path}" requires an array value`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
nextInputValue = removeArrayItem(currentInputValue, change.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
paramsOut.inputs = paramsOut.inputs || {}
|
|
||||||
paramsOut.inputs[inputKey] = nextInputValue
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathSegments.length !== 1) {
|
|
||||||
diagnostics.push(
|
|
||||||
`Unsupported path "${change.path}" on ${targetId}. Use inputs.* or top-level field names.`
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
const topLevelField = pathSegments[0]
|
|
||||||
if (!['name', 'type', 'triggerMode', 'advancedMode', 'enabled'].includes(topLevelField)) {
|
|
||||||
diagnostics.push(`Unsupported top-level path "${change.path}" on ${targetId}`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
paramsOut[topLevelField] = change.op === 'unset' ? null : change.value
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const mutation of changeSpec.mutations || []) {
|
|
||||||
if (mutation.action === 'ensure_block') {
|
|
||||||
      const targetId = resolveTarget(mutation.target, true)
      if (!targetId) {
        diagnostics.push('ensure_block is missing a resolvable target')
        continue
      }

      const existingBlock = workingState.blocks[targetId]
      if (existingBlock) {
        const editParams: Record<string, any> = {}
        if (mutation.name) editParams.name = mutation.name
        if (mutation.type) editParams.type = mutation.type
        if (mutation.inputs) editParams.inputs = mutation.inputs
        if (mutation.triggerMode !== undefined) editParams.triggerMode = mutation.triggerMode
        if (mutation.advancedMode !== undefined) editParams.advancedMode = mutation.advancedMode
        if (mutation.enabled !== undefined) editParams.enabled = mutation.enabled
        operations.push({
          operation_type: 'edit',
          block_id: targetId,
          params: editParams,
        })
        touchedBlocks.add(targetId)
      } else {
        if (!mutation.type || !mutation.name) {
          diagnostics.push(`ensure_block for "${targetId}" requires type and name when creating`)
          continue
        }
        const blockId =
          mutation.target?.blockId || mutation.target?.alias || createDraftBlockId(mutation.name)
        const addParams: Record<string, any> = {
          type: mutation.type,
          name: mutation.name,
        }
        if (mutation.inputs) addParams.inputs = mutation.inputs
        if (mutation.triggerMode !== undefined) addParams.triggerMode = mutation.triggerMode
        if (mutation.advancedMode !== undefined) addParams.advancedMode = mutation.advancedMode
        if (mutation.enabled !== undefined) addParams.enabled = mutation.enabled
        operations.push({
          operation_type: 'add',
          block_id: blockId,
          params: addParams,
        })
        workingState.blocks[blockId] = {
          id: blockId,
          type: mutation.type,
          name: mutation.name,
          subBlocks: Object.fromEntries(
            Object.entries(mutation.inputs || {}).map(([key, value]) => [
              key,
              { id: key, value, type: 'short-input' },
            ])
          ),
          triggerMode: mutation.triggerMode || false,
          advancedMode: mutation.advancedMode || false,
          enabled: mutation.enabled !== undefined ? mutation.enabled : true,
        }
        plannedBlockTypes.set(blockId, mutation.type)
        touchedBlocks.add(blockId)
        if (mutation.target?.alias) aliasMap.set(mutation.target.alias, blockId)
      }
      continue
    }

    if (mutation.action === 'patch_block') {
      const targetId = resolveTarget(mutation.target)
      if (!targetId) {
        diagnostics.push('patch_block target could not be resolved')
        continue
      }
      const blockType =
        String(workingState.blocks[targetId]?.type || '') || plannedBlockTypes.get(targetId) || null

      const editParams: Record<string, any> = {}
      for (const change of mutation.changes || []) {
        applyPatchChange(targetId, blockType, change, editParams)
      }
      if (Object.keys(editParams).length === 0) {
        warnings.push(`patch_block for ${targetId} had no effective changes`)
        continue
      }
      operations.push({
        operation_type: 'edit',
        block_id: targetId,
        params: editParams,
      })
      touchedBlocks.add(targetId)
      continue
    }

    if (mutation.action === 'remove_block') {
      const targetId = resolveTarget(mutation.target)
      if (!targetId) {
        diagnostics.push('remove_block target could not be resolved')
        continue
      }
      operations.push({
        operation_type: 'delete',
        block_id: targetId,
        params: {},
      })
      touchedBlocks.add(targetId)
      connectionState.delete(targetId)
      for (const [source, handles] of connectionState.entries()) {
        for (const [handle, targets] of handles.entries()) {
          const nextTargets = targets.filter((target) => target.block !== targetId)
          handles.set(handle, nextTargets)
        }
        connectionTouchedSources.add(source)
      }
      continue
    }

    if (mutation.action === 'connect' || mutation.action === 'disconnect') {
      const from = resolveTarget(mutation.from)
      const to = resolveTarget(mutation.to)
      if (!from || !to) {
        diagnostics.push(`${mutation.action} requires resolvable from/to targets`)
        continue
      }
      const sourceHandle = normalizeHandle(mutation.handle)
      const targetHandle = mutation.toHandle || 'target'
      let sourceMap = connectionState.get(from)
      if (!sourceMap) {
        sourceMap = new Map()
        connectionState.set(from, sourceMap)
      }
      const existingTargets = sourceMap.get(sourceHandle) || []
      const mode = mutation.action === 'disconnect' ? 'remove' : mutation.mode || 'set'
      const nextTargets = ensureConnectionTarget(
        existingTargets,
        { block: to, handle: targetHandle },
        mode
      )
      sourceMap.set(sourceHandle, nextTargets)
      connectionTouchedSources.add(from)
      touchedBlocks.add(from)
    }
  }

  for (const link of changeSpec.links || []) {
    const from = resolveTarget(
      {
        blockId: link.from.blockId,
        alias: link.from.alias,
        match: link.from.match,
      },
      true
    )
    const to = resolveTarget(
      {
        blockId: link.to.blockId,
        alias: link.to.alias,
        match: link.to.match,
      },
      true
    )
    if (!from || !to) {
      diagnostics.push('link contains unresolved from/to target')
      continue
    }

    const sourceHandle = normalizeHandle(link.from.handle)
    const targetHandle = link.to.handle || 'target'
    let sourceMap = connectionState.get(from)
    if (!sourceMap) {
      sourceMap = new Map()
      connectionState.set(from, sourceMap)
    }
    const existingTargets = sourceMap.get(sourceHandle) || []
    const nextTargets = ensureConnectionTarget(
      existingTargets,
      { block: to, handle: targetHandle },
      link.mode || 'set'
    )
    sourceMap.set(sourceHandle, nextTargets)
    connectionTouchedSources.add(from)
    touchedBlocks.add(from)
  }

  for (const sourceBlockId of stableUnique([...connectionTouchedSources])) {
    if (!connectionState.has(sourceBlockId)) continue
    const sourceConnections = connectionState.get(sourceBlockId)!
    operations.push({
      operation_type: 'edit',
      block_id: sourceBlockId,
      params: {
        connections: connectionStateToPayload(sourceConnections),
      },
    })
  }

  return {
    operations,
    warnings,
    diagnostics,
    touchedBlocks: [...touchedBlocks],
  }
}

function summarizeDiff(
  beforeState: { blocks: Record<string, any>; edges: Array<Record<string, any>> },
  afterState: { blocks: Record<string, any>; edges: Array<Record<string, any>> },
  operations: Array<Record<string, any>>
): Record<string, any> {
  const beforeBlocks = Object.keys(beforeState.blocks || {}).length
  const afterBlocks = Object.keys(afterState.blocks || {}).length
  const beforeEdges = (beforeState.edges || []).length
  const afterEdges = (afterState.edges || []).length

  const counts = operations.reduce<Record<string, number>>((acc, operation) => {
    const opType = String(operation.operation_type || 'unknown')
    acc[opType] = (acc[opType] || 0) + 1
    return acc
  }, {})

  return {
    operationCounts: counts,
    blocks: {
      before: beforeBlocks,
      after: afterBlocks,
      delta: afterBlocks - beforeBlocks,
    },
    edges: {
      before: beforeEdges,
      after: afterEdges,
      delta: afterEdges - beforeEdges,
    },
  }
}

async function validateAndSimulateOperations(params: {
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
    loops: Record<string, any>
    parallels: Record<string, any>
  }
  operations: Array<Record<string, any>>
  userId: string
}): Promise<{
  operationsForApply: Array<Record<string, any>>
  simulatedState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
    loops: Record<string, any>
    parallels: Record<string, any>
  }
  warnings: string[]
  diagnostics: string[]
}> {
  const diagnostics: string[] = []
  const warnings: string[] = []

  const permissionConfig = await getUserPermissionConfig(params.userId)
  const { filteredOperations, errors: preValidationErrors } = await preValidateCredentialInputs(
    params.operations as any,
    { userId: params.userId },
    params.workflowState
  )
  for (const error of preValidationErrors) {
    warnings.push(error.error)
  }

  const { state, validationErrors, skippedItems } = applyOperationsToWorkflowState(
    params.workflowState,
    filteredOperations as any,
    permissionConfig
  )

  for (const validationError of validationErrors) {
    warnings.push(validationError.error)
  }
  for (const skippedItem of skippedItems) {
    warnings.push(skippedItem.reason)
  }

  if (Object.keys(state.blocks || {}).length === 0) {
    diagnostics.push('Simulation produced an empty workflow state')
  }

  return {
    operationsForApply: filteredOperations as Array<Record<string, any>>,
    simulatedState: state,
    warnings,
    diagnostics,
  }
}

export const workflowChangeServerTool: BaseServerTool<WorkflowChangeParams, any> = {
  name: 'workflow_change',
  inputSchema: WorkflowChangeInputSchema,
  async execute(params: WorkflowChangeParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    if (params.mode === 'dry_run') {
      const workflowId = params.workflowId || getContextPack(params.contextPackId || '')?.workflowId
      if (!workflowId) {
        throw new Error('workflowId is required for dry_run')
      }
      if (!params.changeSpec) {
        throw new Error('changeSpec is required for dry_run')
      }

      const authorization = await authorizeWorkflowByWorkspacePermission({
        workflowId,
        userId: context.userId,
        action: 'write',
      })
      if (!authorization.allowed) {
        throw new Error(authorization.message || 'Unauthorized workflow access')
      }

      const { workflowState } = await loadWorkflowStateFromDb(workflowId)
      const currentHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
      const requestedHash = params.baseSnapshotHash
      if (requestedHash && requestedHash !== currentHash) {
        throw new Error(
          `snapshot_mismatch: expected ${requestedHash} but current state is ${currentHash}`
        )
      }

      const compileResult = await compileChangeSpec({
        changeSpec: params.changeSpec,
        workflowState,
        userId: context.userId,
        workflowId,
      })

      const simulation = await validateAndSimulateOperations({
        workflowState,
        operations: compileResult.operations,
        userId: context.userId,
      })

      const diffSummary = summarizeDiff(
        workflowState,
        simulation.simulatedState,
        simulation.operationsForApply
      )
      const diagnostics = [...compileResult.diagnostics, ...simulation.diagnostics]
      const warnings = [...compileResult.warnings, ...simulation.warnings]

      const proposal: WorkflowChangeProposal = {
        workflowId,
        baseSnapshotHash: currentHash,
        compiledOperations: simulation.operationsForApply,
        diffSummary,
        warnings,
        diagnostics,
        touchedBlocks: compileResult.touchedBlocks,
      }
      const proposalId = saveProposal(proposal)

      logger.info('Compiled workflow_change dry run', {
        workflowId,
        proposalId,
        operationCount: proposal.compiledOperations.length,
        warningCount: warnings.length,
        diagnosticsCount: diagnostics.length,
      })

      return {
        success: diagnostics.length === 0,
        mode: 'dry_run',
        workflowId,
        proposalId,
        baseSnapshotHash: currentHash,
        compiledOperations: proposal.compiledOperations,
        diffSummary,
        warnings,
        diagnostics,
        touchedBlocks: proposal.touchedBlocks,
      }
    }

    // apply mode
    const proposalId = params.proposalId
    if (!proposalId) {
      throw new Error('proposalId is required for apply')
    }

    const proposal = getProposal(proposalId)
    if (!proposal) {
      throw new Error(`Proposal not found or expired: ${proposalId}`)
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: proposal.workflowId,
      userId: context.userId,
      action: 'write',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(proposal.workflowId)
    const currentHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
    const expectedHash = params.expectedSnapshotHash || proposal.baseSnapshotHash
    if (expectedHash && expectedHash !== currentHash) {
      throw new Error(`snapshot_mismatch: expected ${expectedHash} but current is ${currentHash}`)
    }

    const applyResult = await editWorkflowServerTool.execute(
      {
        workflowId: proposal.workflowId,
        operations: proposal.compiledOperations as any,
      },
      { userId: context.userId }
    )

    const appliedWorkflowState = (applyResult as any)?.workflowState
    const newSnapshotHash = appliedWorkflowState
      ? hashWorkflowState(appliedWorkflowState as Record<string, unknown>)
      : null

    return {
      success: true,
      mode: 'apply',
      workflowId: proposal.workflowId,
      proposalId,
      baseSnapshotHash: proposal.baseSnapshotHash,
      newSnapshotHash,
      operations: proposal.compiledOperations,
      workflowState: appliedWorkflowState || null,
      appliedDiff: proposal.diffSummary,
      warnings: proposal.warnings,
      diagnostics: proposal.diagnostics,
      editResult: applyResult,
    }
  },
}
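The tool above splits a change into a dry-run compile step and a separate apply step keyed by a proposal ID and snapshot hash. A minimal sketch of that two-phase call pattern, assuming a server-side caller with a resolved user ID; the import path, workflow ID, block IDs, and the shape of the change spec beyond its `links` field are assumptions, not the confirmed schema:

// Import path is an assumption based on the surrounding files.
import { workflowChangeServerTool } from './workflow-change'

async function proposeAndApply(userId: string, workflowId: string) {
  // Phase 1: compile and simulate without mutating the stored workflow.
  const dryRun = await workflowChangeServerTool.execute(
    {
      mode: 'dry_run',
      workflowId,
      changeSpec: {
        // 'start' and 'notify' are placeholder block IDs.
        links: [{ from: { blockId: 'start' }, to: { blockId: 'notify' }, mode: 'set' }],
      },
    } as any,
    { userId }
  )
  if (!dryRun.success) return dryRun // diagnostics explain what could not be compiled

  // Phase 2: apply the stored proposal, guarding against concurrent edits via the snapshot hash.
  return workflowChangeServerTool.execute(
    {
      mode: 'apply',
      proposalId: dryRun.proposalId,
      expectedSnapshotHash: dryRun.baseSnapshotHash,
    } as any,
    { userId }
  )
}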
@@ -1,158 +0,0 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { getContextPack, saveContextPack } from './change-store'
import {
  buildSchemasByType,
  getAllKnownBlockTypes,
  hashWorkflowState,
  loadWorkflowStateFromDb,
  summarizeWorkflowState,
} from './workflow-state'

const logger = createLogger('WorkflowContextServerTool')

const WorkflowContextGetInputSchema = z.object({
  workflowId: z.string(),
  objective: z.string().optional(),
  includeBlockTypes: z.array(z.string()).optional(),
  includeAllSchemas: z.boolean().optional(),
})

type WorkflowContextGetParams = z.infer<typeof WorkflowContextGetInputSchema>

const WorkflowContextExpandInputSchema = z.object({
  contextPackId: z.string(),
  blockTypes: z.array(z.string()).optional(),
  schemaRefs: z.array(z.string()).optional(),
})

type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>

function parseSchemaRefToBlockType(schemaRef: string): string | null {
  if (!schemaRef) return null
  const [blockType] = schemaRef.split('@')
  return blockType || null
}

function buildAvailableBlockCatalog(
  schemaRefsByType: Record<string, string>
): Array<Record<string, any>> {
  return Object.entries(schemaRefsByType)
    .sort((a, b) => a[0].localeCompare(b[0]))
    .map(([blockType, schemaRef]) => ({
      blockType,
      schemaRef,
    }))
}

export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetParams, any> = {
  name: 'workflow_context_get',
  inputSchema: WorkflowContextGetInputSchema,
  async execute(params: WorkflowContextGetParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)

    const blockTypesInWorkflow = Object.values(workflowState.blocks || {}).map((block: any) =>
      String(block?.type || '')
    )
    const requestedTypes = params.includeBlockTypes || []
    const includeAllSchemas = params.includeAllSchemas === true
    const candidateTypes = includeAllSchemas
      ? getAllKnownBlockTypes()
      : [...blockTypesInWorkflow, ...requestedTypes]
    const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)

    const summary = summarizeWorkflowState(workflowState)
    const packId = saveContextPack({
      workflowId: params.workflowId,
      snapshotHash,
      workflowState,
      schemasByType,
      schemaRefsByType,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
    })

    logger.info('Generated workflow context pack', {
      workflowId: params.workflowId,
      contextPackId: packId,
      schemaCount: Object.keys(schemaRefsByType).length,
    })

    return {
      success: true,
      contextPackId: packId,
      workflowId: params.workflowId,
      snapshotHash,
      summary: {
        ...summary,
        objective: params.objective || null,
      },
      schemaRefsByType,
      availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
      inScopeSchemas: schemasByType,
    }
  },
}

export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpandParams, any> = {
  name: 'workflow_context_expand',
  inputSchema: WorkflowContextExpandInputSchema,
  async execute(params: WorkflowContextExpandParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const contextPack = getContextPack(params.contextPackId)
    if (!contextPack) {
      throw new Error(`Context pack not found or expired: ${params.contextPackId}`)
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: contextPack.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const requestedBlockTypes = new Set<string>()
    for (const blockType of params.blockTypes || []) {
      if (blockType) requestedBlockTypes.add(blockType)
    }
    for (const schemaRef of params.schemaRefs || []) {
      const blockType = parseSchemaRefToBlockType(schemaRef)
      if (blockType) requestedBlockTypes.add(blockType)
    }

    const typesToExpand = [...requestedBlockTypes]
    const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)

    return {
      success: true,
      contextPackId: params.contextPackId,
      workflowId: contextPack.workflowId,
      snapshotHash: contextPack.snapshotHash,
      schemasByType,
      schemaRefsByType,
    }
  },
}
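A rough sketch of how the two context tools above chain together, assuming server-side invocation with a user context; the import path, objective text, and the 'slack' block type are placeholders:

// Import path is an assumption based on the surrounding files.
import { workflowContextExpandServerTool, workflowContextGetServerTool } from './workflow-context'

async function loadAgentContext(userId: string, workflowId: string) {
  // First call returns a compact summary plus schema refs for block types already in the workflow.
  const pack = await workflowContextGetServerTool.execute(
    { workflowId, objective: 'add a Slack notification' },
    { userId }
  )

  // Later, expand only the extra schemas actually needed, by block type or by schemaRef.
  const expanded = await workflowContextExpandServerTool.execute(
    { contextPackId: pack.contextPackId, blockTypes: ['slack'] },
    { userId }
  )
  return { pack, expanded }
}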
@@ -1,226 +0,0 @@
import crypto from 'crypto'
import { db } from '@sim/db'
import { workflow as workflowTable } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { getAllBlockTypes, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'

const logger = createLogger('WorkflowContextState')

function stableSortValue(value: any): any {
  if (Array.isArray(value)) {
    return value.map(stableSortValue)
  }
  if (value && typeof value === 'object') {
    const sorted: Record<string, any> = {}
    for (const key of Object.keys(value).sort()) {
      sorted[key] = stableSortValue(value[key])
    }
    return sorted
  }
  return value
}

export function hashWorkflowState(state: Record<string, unknown>): string {
  const stable = stableSortValue(state)
  const payload = JSON.stringify(stable)
  return `sha256:${crypto.createHash('sha256').update(payload).digest('hex')}`
}

function normalizeOptions(options: unknown): string[] | null {
  if (!Array.isArray(options)) return null
  const normalized = options
    .map((option) => {
      if (option == null) return null
      if (typeof option === 'object') {
        const optionRecord = option as Record<string, unknown>
        const id = optionRecord.id
        if (typeof id === 'string') return id
        const label = optionRecord.label
        if (typeof label === 'string') return label
        return null
      }
      return String(option)
    })
    .filter((value): value is string => Boolean(value))
  return normalized.length > 0 ? normalized : null
}

function serializeRequired(required: SubBlockConfig['required']): boolean | Record<string, any> {
  if (typeof required === 'boolean') return required
  if (!required) return false
  if (typeof required === 'object') {
    const out: Record<string, any> = {}
    const record = required as Record<string, unknown>
    for (const key of ['field', 'operator', 'value']) {
      if (record[key] !== undefined) {
        out[key] = record[key]
      }
    }
    return out
  }
  return false
}

function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
  const staticOptions =
    typeof subBlock.options === 'function' ? null : normalizeOptions(subBlock.options)
  return {
    id: subBlock.id,
    type: subBlock.type,
    title: subBlock.title,
    description: subBlock.description || null,
    mode: subBlock.mode || null,
    placeholder: subBlock.placeholder || null,
    hidden: Boolean(subBlock.hidden),
    multiSelect: Boolean(subBlock.multiSelect),
    required: serializeRequired(subBlock.required),
    hasDynamicOptions: typeof subBlock.options === 'function',
    options: staticOptions,
    defaultValue: subBlock.defaultValue ?? null,
    min: subBlock.min ?? null,
    max: subBlock.max ?? null,
  }
}

function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
  const blockConfig = getBlock(blockType)
  if (!blockConfig) return null

  const subBlocks = Array.isArray(blockConfig.subBlocks)
    ? blockConfig.subBlocks.map(serializeSubBlock)
    : []
  const outputs = blockConfig.outputs || {}
  const outputKeys = Object.keys(outputs)

  return {
    blockType,
    blockName: blockConfig.name || blockType,
    category: blockConfig.category,
    triggerAllowed: Boolean(blockConfig.triggerAllowed || blockConfig.triggers?.enabled),
    hasTriggersConfig: Boolean(blockConfig.triggers?.enabled),
    subBlocks,
    outputKeys,
    longDescription: blockConfig.longDescription || null,
  }
}

export function buildSchemasByType(blockTypes: string[]): {
  schemasByType: Record<string, any>
  schemaRefsByType: Record<string, string>
} {
  const schemasByType: Record<string, any> = {}
  const schemaRefsByType: Record<string, string> = {}

  const uniqueTypes = [...new Set(blockTypes.filter(Boolean))]
  for (const blockType of uniqueTypes) {
    const schema = serializeBlockSchema(blockType)
    if (!schema) continue
    const stableSchema = stableSortValue(schema)
    const schemaHash = crypto
      .createHash('sha256')
      .update(JSON.stringify(stableSchema))
      .digest('hex')
    schemasByType[blockType] = stableSchema
    schemaRefsByType[blockType] = `${blockType}@sha256:${schemaHash}`
  }

  return { schemasByType, schemaRefsByType }
}

export async function loadWorkflowStateFromDb(workflowId: string): Promise<{
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
    loops: Record<string, any>
    parallels: Record<string, any>
  }
  workspaceId?: string
}> {
  const [workflowRecord] = await db
    .select({ workspaceId: workflowTable.workspaceId })
    .from(workflowTable)
    .where(eq(workflowTable.id, workflowId))
    .limit(1)
  if (!workflowRecord) {
    throw new Error(`Workflow ${workflowId} not found`)
  }

  const normalized = await loadWorkflowFromNormalizedTables(workflowId)
  if (!normalized) {
    throw new Error(`Workflow ${workflowId} has no normalized data`)
  }

  const blocks = { ...normalized.blocks }
  const invalidBlockIds: string[] = []
  for (const [blockId, block] of Object.entries(blocks)) {
    if (!(block as { type?: unknown })?.type) {
      invalidBlockIds.push(blockId)
    }
  }

  for (const blockId of invalidBlockIds) {
    delete blocks[blockId]
  }

  const invalidSet = new Set(invalidBlockIds)
  const edges = (normalized.edges || []).filter(
    (edge: any) => !invalidSet.has(edge.source) && !invalidSet.has(edge.target)
  )

  if (invalidBlockIds.length > 0) {
    logger.warn('Dropped blocks without type while loading workflow state', {
      workflowId,
      dropped: invalidBlockIds,
    })
  }

  return {
    workflowState: {
      blocks,
      edges,
      loops: normalized.loops || {},
      parallels: normalized.parallels || {},
    },
    workspaceId: workflowRecord.workspaceId || undefined,
  }
}

export function summarizeWorkflowState(workflowState: {
  blocks: Record<string, any>
  edges: Array<Record<string, any>>
  loops: Record<string, any>
  parallels: Record<string, any>
}): Record<string, unknown> {
  const blocks = workflowState.blocks || {}
  const edges = workflowState.edges || []
  const blockTypes: Record<string, number> = {}
  const triggerBlocks: Array<{ id: string; name: string; type: string }> = []

  for (const [blockId, block] of Object.entries(blocks)) {
    const blockType = String((block as Record<string, unknown>).type || 'unknown')
    blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
    if ((block as Record<string, unknown>).triggerMode === true) {
      triggerBlocks.push({
        id: blockId,
        name: String((block as Record<string, unknown>).name || blockType),
        type: blockType,
      })
    }
  }

  return {
    blockCount: Object.keys(blocks).length,
    edgeCount: edges.length,
    loopCount: Object.keys(workflowState.loops || {}).length,
    parallelCount: Object.keys(workflowState.parallels || {}).length,
    blockTypes,
    triggerBlocks,
  }
}

export function getAllKnownBlockTypes(): string[] {
  return getAllBlockTypes()
}
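Because hashWorkflowState sorts object keys recursively via stableSortValue before serializing, two states that differ only in key order produce the same hash. A small illustration, with made-up values:

import { hashWorkflowState } from './workflow-state'

const a = hashWorkflowState({ blocks: { b1: { type: 'starter', name: 'Start' } }, edges: [] })
const b = hashWorkflowState({ edges: [], blocks: { b1: { name: 'Start', type: 'starter' } } })
// a === b, and both take the form 'sha256:<hex digest>'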
@@ -1,194 +0,0 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'

const logger = createLogger('WorkflowVerifyServerTool')

const AcceptanceItemSchema = z.union([
  z.string(),
  z.object({
    kind: z.string().optional(),
    assert: z.string(),
  }),
])

const WorkflowVerifyInputSchema = z
  .object({
    workflowId: z.string(),
    acceptance: z.array(AcceptanceItemSchema).optional(),
    baseSnapshotHash: z.string().optional(),
  })
  .strict()

type WorkflowVerifyParams = z.infer<typeof WorkflowVerifyInputSchema>

function normalizeName(value: string): string {
  return value.trim().toLowerCase()
}

function resolveBlockToken(
  workflowState: { blocks: Record<string, any> },
  token: string
): string | null {
  if (!token) return null
  if (workflowState.blocks[token]) return token
  const normalized = normalizeName(token)
  for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
    const blockName = normalizeName(String((block as Record<string, unknown>).name || ''))
    if (blockName === normalized) return blockId
  }
  return null
}

function hasPath(
  workflowState: { edges: Array<Record<string, any>> },
  blockPath: string[]
): boolean {
  if (blockPath.length < 2) return true
  const adjacency = new Map<string, string[]>()
  for (const edge of workflowState.edges || []) {
    const source = String(edge.source || '')
    const target = String(edge.target || '')
    if (!source || !target) continue
    const existing = adjacency.get(source) || []
    existing.push(target)
    adjacency.set(source, existing)
  }

  for (let i = 0; i < blockPath.length - 1; i++) {
    const from = blockPath[i]
    const to = blockPath[i + 1]
    const next = adjacency.get(from) || []
    if (!next.includes(to)) return false
  }
  return true
}

function evaluateAssertions(params: {
  workflowState: {
    blocks: Record<string, any>
    edges: Array<Record<string, any>>
  }
  assertions: string[]
}): { failures: string[]; checks: Array<Record<string, any>> } {
  const failures: string[] = []
  const checks: Array<Record<string, any>> = []

  for (const assertion of params.assertions) {
    if (assertion.startsWith('block_exists:')) {
      const token = assertion.slice('block_exists:'.length).trim()
      const blockId = resolveBlockToken(params.workflowState, token)
      const passed = Boolean(blockId)
      checks.push({ assert: assertion, passed, resolvedBlockId: blockId || null })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('trigger_exists:')) {
      const triggerType = normalizeName(assertion.slice('trigger_exists:'.length))
      const triggerBlock = Object.values(params.workflowState.blocks || {}).find((block: any) => {
        if (block?.triggerMode !== true) return false
        return normalizeName(String(block?.type || '')) === triggerType
      })
      const passed = Boolean(triggerBlock)
      checks.push({ assert: assertion, passed })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    if (assertion.startsWith('path_exists:')) {
      const rawPath = assertion.slice('path_exists:'.length).trim()
      const tokens = rawPath
        .split('->')
        .map((token) => token.trim())
        .filter(Boolean)
      const resolvedPath = tokens
        .map((token) => resolveBlockToken(params.workflowState, token))
        .filter((value): value is string => Boolean(value))

      const resolvedAll = resolvedPath.length === tokens.length
      const passed = resolvedAll && hasPath(params.workflowState, resolvedPath)
      checks.push({
        assert: assertion,
        passed,
        resolvedPath,
      })
      if (!passed) failures.push(`Assertion failed: ${assertion}`)
      continue
    }

    // Unknown assertion format - mark as warning failure for explicit visibility.
    checks.push({ assert: assertion, passed: false, reason: 'unknown_assertion_type' })
    failures.push(`Unknown assertion format: ${assertion}`)
  }

  return { failures, checks }
}

export const workflowVerifyServerTool: BaseServerTool<WorkflowVerifyParams, any> = {
  name: 'workflow_verify',
  inputSchema: WorkflowVerifyInputSchema,
  async execute(params: WorkflowVerifyParams, context?: { userId: string }): Promise<any> {
    if (!context?.userId) {
      throw new Error('Unauthorized workflow access')
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId: params.workflowId,
      userId: context.userId,
      action: 'read',
    })
    if (!authorization.allowed) {
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

    const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
    const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
    if (params.baseSnapshotHash && params.baseSnapshotHash !== snapshotHash) {
      return {
        success: false,
        verified: false,
        reason: 'snapshot_mismatch',
        expected: params.baseSnapshotHash,
        current: snapshotHash,
      }
    }

    const validation = validateWorkflowState(workflowState as any, { sanitize: false })

    const assertions = (params.acceptance || []).map((item) =>
      typeof item === 'string' ? item : item.assert
    )
    const assertionResults = evaluateAssertions({
      workflowState,
      assertions,
    })

    const verified =
      validation.valid && assertionResults.failures.length === 0 && validation.errors.length === 0

    logger.info('Workflow verification complete', {
      workflowId: params.workflowId,
      verified,
      errorCount: validation.errors.length,
      warningCount: validation.warnings.length,
      assertionFailures: assertionResults.failures.length,
    })

    return {
      success: true,
      verified,
      snapshotHash,
      validation: {
        valid: validation.valid,
        errors: validation.errors,
        warnings: validation.warnings,
      },
      assertions: assertionResults.checks,
      failures: assertionResults.failures,
    }
  },
}
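The acceptance strings recognized above are matched by prefix. A sketch of a verify call exercising each form, assuming the tool is imported from this file; the workflow ID, user ID, and block names ('Start', 'Notify') are placeholders:

// Import path is an assumption based on the surrounding files.
import { workflowVerifyServerTool } from './workflow-verify'

const result = await workflowVerifyServerTool.execute(
  {
    workflowId: 'wf_123', // placeholder
    acceptance: [
      'block_exists:Notify', // resolves by block ID or case-insensitive block name
      'trigger_exists:starter', // matches a block with triggerMode === true and this type
      { kind: 'path', assert: 'path_exists:Start -> Notify' }, // every hop must be a direct edge
    ],
  },
  { userId: 'user_123' } // placeholder
)
// result.verified is true only when validation passes and every assertion holds.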
apps/sim/lib/execution/event-buffer.ts (new file, 246 lines)
@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'

const logger = createLogger('ExecutionEventBuffer')

const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200

function getEventsKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:events`
}

function getSeqKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:seq`
}

function getMetaKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:meta`
}

export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'

export interface ExecutionStreamMeta {
  status: ExecutionStreamStatus
  userId?: string
  workflowId?: string
  updatedAt?: string
}

export interface ExecutionEventEntry {
  eventId: number
  executionId: string
  event: ExecutionEvent
}

export interface ExecutionEventWriter {
  write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
  flush: () => Promise<void>
  close: () => Promise<void>
}

export async function setExecutionMeta(
  executionId: string,
  meta: Partial<ExecutionStreamMeta>
): Promise<void> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
    return
  }
  try {
    const key = getMetaKey(executionId)
    const payload: Record<string, string> = {
      updatedAt: new Date().toISOString(),
    }
    if (meta.status) payload.status = meta.status
    if (meta.userId) payload.userId = meta.userId
    if (meta.workflowId) payload.workflowId = meta.workflowId
    await redis.hset(key, payload)
    await redis.expire(key, TTL_SECONDS)
  } catch (error) {
    logger.warn('Failed to update execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
    return null
  }
  try {
    const key = getMetaKey(executionId)
    const meta = await redis.hgetall(key)
    if (!meta || Object.keys(meta).length === 0) return null
    return meta as unknown as ExecutionStreamMeta
  } catch (error) {
    logger.warn('Failed to read execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

export async function readExecutionEvents(
  executionId: string,
  afterEventId: number
): Promise<ExecutionEventEntry[]> {
  const redis = getRedisClient()
  if (!redis) return []
  try {
    const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
    return raw
      .map((entry) => {
        try {
          return JSON.parse(entry) as ExecutionEventEntry
        } catch {
          return null
        }
      })
      .filter((entry): entry is ExecutionEventEntry => Boolean(entry))
  } catch (error) {
    logger.warn('Failed to read execution events', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return []
  }
}

export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn(
      'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
      {
        executionId,
      }
    )
    return {
      write: async (event) => ({ eventId: 0, executionId, event }),
      flush: async () => {},
      close: async () => {},
    }
  }

  let pending: ExecutionEventEntry[] = []
  let nextEventId = 0
  let maxReservedId = 0
  let flushTimer: ReturnType<typeof setTimeout> | null = null

  const scheduleFlush = () => {
    if (flushTimer) return
    flushTimer = setTimeout(() => {
      flushTimer = null
      void flush()
    }, FLUSH_INTERVAL_MS)
  }

  const reserveIds = async (minCount: number) => {
    const reserveCount = Math.max(RESERVE_BATCH, minCount)
    const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
    const startId = newMax - reserveCount + 1
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      nextEventId = startId
      maxReservedId = newMax
    }
  }

  let flushPromise: Promise<void> | null = null
  let closed = false
  const inflightWrites = new Set<Promise<ExecutionEventEntry>>()

  const doFlush = async () => {
    if (pending.length === 0) return
    const batch = pending
    pending = []
    try {
      const key = getEventsKey(executionId)
      const zaddArgs: (string | number)[] = []
      for (const entry of batch) {
        zaddArgs.push(entry.eventId, JSON.stringify(entry))
      }
      const pipeline = redis.pipeline()
      pipeline.zadd(key, ...zaddArgs)
      pipeline.expire(key, TTL_SECONDS)
      pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
      pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
      await pipeline.exec()
    } catch (error) {
      logger.warn('Failed to flush execution events', {
        executionId,
        batchSize: batch.length,
        error: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : undefined,
      })
      pending = batch.concat(pending)
    }
  }

  const flush = async () => {
    if (flushPromise) {
      await flushPromise
      return
    }
    flushPromise = doFlush()
    try {
      await flushPromise
    } finally {
      flushPromise = null
      if (pending.length > 0) scheduleFlush()
    }
  }

  const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
    if (closed) return { eventId: 0, executionId, event }
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      await reserveIds(1)
    }
    const eventId = nextEventId++
    const entry: ExecutionEventEntry = { eventId, executionId, event }
    pending.push(entry)
    if (pending.length >= FLUSH_MAX_BATCH) {
      await flush()
    } else {
      scheduleFlush()
    }
    return entry
  }

  const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
    const p = writeCore(event)
    inflightWrites.add(p)
    const remove = () => inflightWrites.delete(p)
    p.then(remove, remove)
    return p
  }

  const close = async () => {
    closed = true
    if (flushTimer) {
      clearTimeout(flushTimer)
      flushTimer = null
    }
    if (inflightWrites.size > 0) {
      await Promise.allSettled(inflightWrites)
    }
    if (flushPromise) {
      await flushPromise
    }
    if (pending.length > 0) {
      await doFlush()
    }
  }

  return { write, flush, close }
}
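A usage sketch for the new buffer module above, assuming a Redis-backed deployment; the execution ID and the event payload shape are placeholders (ExecutionEvent is defined elsewhere in the repo):

// Import path follows the '@/lib' alias used by the file itself; treat it as an assumption.
import {
  createExecutionEventWriter,
  readExecutionEvents,
  setExecutionMeta,
} from '@/lib/execution/event-buffer'

async function streamExecution(executionId: string) {
  const writer = createExecutionEventWriter(executionId)
  await setExecutionMeta(executionId, { status: 'active' })

  // Writes are batched in memory and flushed to a Redis sorted set on a short timer.
  await writer.write({ type: 'block.started' } as any) // placeholder event shape
  await writer.close() // drains in-flight writes and any pending batch

  await setExecutionMeta(executionId, { status: 'complete' })

  // A poller can resume from the last event ID it has seen (0 = from the beginning).
  return readExecutionEvents(executionId, 0)
}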
@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
     })
   })
+
+  describe('Trigger Config Normalization (False Positive Prevention)', () => {
+    it.concurrent(
+      'should not detect change when deployed has null fields but current has values from triggerConfig',
+      () => {
+        // Core scenario: deployed state has null individual fields, current state has
+        // values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
+        const deployedState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
+                botToken: { id: 'botToken', type: 'short-input', value: null },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123', botToken: 'token456' },
+                },
+              },
+            }),
+          },
+        })
+
+        const currentState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
+                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123', botToken: 'token456' },
+                },
+              },
+            }),
+          },
+        })
+
+        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
+      }
+    )
+
+    it.concurrent(
+      'should detect change when user edits a trigger field to a different value',
+      () => {
+        const deployedState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'old-secret' },
+                },
+              },
+            }),
+          },
+        })
+
+        const currentState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'old-secret' },
+                },
+              },
+            }),
+          },
+        })
+
+        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
+      }
+    )
+
+    it.concurrent('should not detect change when both sides have no triggerConfig', () => {
+      const deployedState = createWorkflowState({
+        blocks: {
+          block1: createBlock('block1', {
+            type: 'starter',
+            subBlocks: {
+              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
+            },
+          }),
+        },
+      })
+
+      const currentState = createWorkflowState({
+        blocks: {
+          block1: createBlock('block1', {
+            type: 'starter',
+            subBlocks: {
+              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
+            },
+          }),
+        },
+      })
+
+      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
+    })
+
+    it.concurrent(
+      'should not detect change when deployed has empty fields and triggerConfig populates them',
+      () => {
+        // Empty string is also treated as "empty" by normalizeTriggerConfigValues
+        const deployedState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123' },
+                },
+              },
+            }),
+          },
+        })
+
+        const currentState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123' },
+                },
+              },
+            }),
+          },
+        })
+
+        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
+      }
+    )
+
+    it.concurrent('should not detect change when triggerId differs', () => {
+      const deployedState = createWorkflowState({
+        blocks: {
+          block1: createBlock('block1', {
+            type: 'starter',
+            subBlocks: {
+              model: { value: 'gpt-4' },
+              triggerId: { value: null },
+            },
+          }),
+        },
+      })
+
+      const currentState = createWorkflowState({
+        blocks: {
+          block1: createBlock('block1', {
+            type: 'starter',
+            subBlocks: {
+              model: { value: 'gpt-4' },
+              triggerId: { value: 'slack_webhook' },
+            },
+          }),
+        },
+      })
+
+      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
+    })
+
+    it.concurrent(
+      'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
+      () => {
+        const deployedState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                model: { value: 'gpt-4' },
+                samplePayload_slack_webhook: { value: 'old payload' },
+                triggerInstructions_slack_webhook: { value: 'old instructions' },
+              },
+            }),
+          },
+        })
+
+        const currentState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                model: { value: 'gpt-4' },
+                samplePayload_slack_webhook: { value: 'new payload' },
+                triggerInstructions_slack_webhook: { value: 'new instructions' },
+              },
+            }),
+          },
+        })
+
+        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
+      }
+    )
+
+    it.concurrent(
+      'should handle mixed scenario: some fields from triggerConfig, some user-edited',
+      () => {
+        const deployedState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
+                botToken: { id: 'botToken', type: 'short-input', value: null },
+                includeFiles: { id: 'includeFiles', type: 'switch', value: false },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123', botToken: 'token456' },
+                },
+              },
+            }),
+          },
+        })
+
+        const currentState = createWorkflowState({
+          blocks: {
+            block1: createBlock('block1', {
+              type: 'starter',
+              subBlocks: {
+                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
+                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
+                includeFiles: { id: 'includeFiles', type: 'switch', value: true },
+                triggerConfig: {
+                  id: 'triggerConfig',
+                  type: 'short-input',
+                  value: { signingSecret: 'secret123', botToken: 'token456' },
+                },
+              },
+            }),
+          },
+        })
+
+        // includeFiles changed from false to true — this IS a real change
+        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
+      }
+    )
+  })
+
   describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
     it.concurrent('should not detect change when webhookId differs', () => {
       const deployedState = createWorkflowState({
@@ -9,6 +9,7 @@ import {
   normalizeLoop,
   normalizeParallel,
   normalizeSubBlockValue,
+  normalizeTriggerConfigValues,
   normalizeValue,
   normalizeVariables,
   sanitizeVariable,
@@ -172,14 +173,18 @@ export function generateWorkflowDiffSummary(
       }
     }

+    // Normalize trigger config values for both states before comparison
+    const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
+    const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
+
     // Compare subBlocks using shared helper for filtering (single source of truth)
     const allSubBlockIds = filterSubBlockIds([
-      ...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
+      ...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
     ])

     for (const subId of allSubBlockIds) {
-      const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
+      const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
-      const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
+      const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined

       if (!currentSub || !previousSub) {
         changes.push({
@@ -4,10 +4,12 @@
 import { describe, expect, it } from 'vitest'
 import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
 import {
+  filterSubBlockIds,
   normalizedStringify,
   normalizeEdge,
   normalizeLoop,
   normalizeParallel,
+  normalizeTriggerConfigValues,
   normalizeValue,
   sanitizeInputFormat,
   sanitizeTools,
@@ -584,4 +586,214 @@ describe('Workflow Normalization Utilities', () => {
|
|||||||
expect(result2).toBe(result3)
|
expect(result2).toBe(result3)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
describe('filterSubBlockIds', () => {
|
||||||
|
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
|
||||||
|
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['botToken', 'signingSecret'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
|
||||||
|
const ids = [
|
||||||
|
'signingSecret',
|
||||||
|
'samplePayload_slack_webhook',
|
||||||
|
'triggerInstructions_slack_webhook',
|
||||||
|
'webhookUrlDisplay_slack_webhook',
|
||||||
|
'botToken',
|
||||||
|
]
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['botToken', 'signingSecret'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
|
||||||
|
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['signingSecret'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
|
||||||
|
const ids = ['mySamplePayload', 'notSamplePayload']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should return sorted results', () => {
|
||||||
|
const ids = ['zebra', 'alpha', 'middle']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['alpha', 'middle', 'zebra'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should handle empty array', () => {
|
||||||
|
expect(filterSubBlockIds([])).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should handle all IDs being excluded', () => {
|
||||||
|
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
|
||||||
|
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['realField'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
|
||||||
|
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
|
||||||
|
const result = filterSubBlockIds(ids)
|
||||||
|
expect(result).toEqual(['signingSecret'])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe('normalizeTriggerConfigValues', () => {
|
||||||
|
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
|
||||||
|
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect(result).toEqual(subBlocks)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect(result).toEqual(subBlocks)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent(
|
||||||
|
'should return subBlocks unchanged when triggerConfig value is not an object',
|
||||||
|
() => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect(result).toEqual(subBlocks)
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
it.concurrent('should populate null individual fields from triggerConfig', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'secret123', botToken: 'token456' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||||
|
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'secret123' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'secret123' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'old-secret' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: null, botToken: undefined },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
botToken: { id: 'botToken', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||||
|
expect((result.botToken as Record<string, unknown>).value).toBe(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { nonExistentField: 'value123' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
expect(result.nonExistentField).toBeUndefined()
|
||||||
|
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should not mutate the original subBlocks object', () => {
|
||||||
|
const original = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'secret123' },
|
||||||
|
},
|
||||||
|
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
|
||||||
|
}
|
||||||
|
normalizeTriggerConfigValues(original)
|
||||||
|
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('should preserve other subBlock properties when populating value', () => {
|
||||||
|
const subBlocks = {
|
||||||
|
triggerConfig: {
|
||||||
|
id: 'triggerConfig',
|
||||||
|
type: 'short-input',
|
||||||
|
value: { signingSecret: 'secret123' },
|
||||||
|
},
|
||||||
|
signingSecret: {
|
||||||
|
id: 'signingSecret',
|
||||||
|
type: 'short-input',
|
||||||
|
value: null,
|
||||||
|
placeholder: 'Enter signing secret',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
const result = normalizeTriggerConfigValues(subBlocks)
|
||||||
|
const normalized = result.signingSecret as Record<string, unknown>
|
||||||
|
expect(normalized.value).toBe('secret123')
|
||||||
|
expect(normalized.id).toBe('signingSecret')
|
||||||
|
expect(normalized.type).toBe('short-input')
|
||||||
|
expect(normalized.placeholder).toBe('Enter signing secret')
|
||||||
|
})
|
||||||
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -418,10 +418,48 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
  */
 export function filterSubBlockIds(subBlockIds: string[]): string[] {
   return subBlockIds
-    .filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
+    .filter((id) => {
+      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
+      if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
+        return false
+      return true
+    })
     .sort()
 }

+/**
+ * Normalizes trigger block subBlocks by populating null/empty individual fields
+ * from the triggerConfig aggregate subBlock. This compensates for the runtime
+ * population done by populateTriggerFieldsFromConfig, ensuring consistent
+ * comparison between client state (with populated values) and deployed state
+ * (with null values from DB).
+ */
+export function normalizeTriggerConfigValues(
+  subBlocks: Record<string, unknown>
+): Record<string, unknown> {
+  const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
+  const triggerConfigValue = triggerConfigSub?.value
+  if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
+    return subBlocks
+  }
+
+  const result = { ...subBlocks }
+  for (const [fieldId, configValue] of Object.entries(
+    triggerConfigValue as Record<string, unknown>
+  )) {
+    if (configValue === null || configValue === undefined) continue
+    const existingSub = result[fieldId] as Record<string, unknown> | undefined
+    if (
+      existingSub &&
+      (existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
+    ) {
+      result[fieldId] = { ...existingSub, value: configValue }
+    }
+  }
+
+  return result
+}
+
 /**
  * Normalizes a subBlock value with sanitization for specific subBlock types.
  * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)
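
As a minimal sketch (not part of the diff), the two helpers above compose during change detection roughly like this; the call site is hypothetical and the subBlock shapes simply mirror the tests:

  // Hypothetical call site; values mirror the tests above.
  const deployed = {
    triggerConfig: { id: 'triggerConfig', type: 'short-input', value: { signingSecret: 'secret123' } },
    signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
    samplePayload_slack_webhook: { id: 'samplePayload_slack_webhook', type: 'code', value: '{}' },
  }
  const normalized = normalizeTriggerConfigValues(deployed)
  // normalized.signingSecret.value is now 'secret123', copied from triggerConfig
  const comparable = filterSubBlockIds(Object.keys(normalized))
  // ['signingSecret']: triggerConfig and the namespaced samplePayload variant are filtered out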
@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
     })
   },

+  setCurrentExecutionId: (workflowId, executionId) => {
+    set({
+      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
+        currentExecutionId: executionId,
+      }),
+    })
+  },
+
+  getCurrentExecutionId: (workflowId) => {
+    return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
+  },
+
   clearRunPath: (workflowId) => {
     set({
       workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
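
A rough usage sketch for the new execution-ID accessors (the workflow and execution IDs below are placeholders, not values from this diff):

  // Placeholders for illustration only.
  const { setCurrentExecutionId, getCurrentExecutionId } = useExecutionStore.getState()
  setCurrentExecutionId('workflow-1', 'exec-123') // remember the in-flight execution
  getCurrentExecutionId('workflow-1') // 'exec-123'
  setCurrentExecutionId('workflow-1', null) // clear once the run settles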
@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
   lastRunPath: Map<string, BlockRunStatus>
   /** Maps edge IDs to their run result from the last execution */
   lastRunEdges: Map<string, EdgeRunStatus>
+  /** The execution ID of the currently running execution */
+  currentExecutionId: string | null
 }

 /**
@@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
   debugContext: null,
   lastRunPath: new Map(),
   lastRunEdges: new Map(),
+  currentExecutionId: null,
 }

 /**
@@ -96,6 +99,10 @@ export interface ExecutionActions {
   setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
   /** Clears the run path and run edges for a workflow */
   clearRunPath: (workflowId: string) => void
+  /** Stores the current execution ID for a workflow */
+  setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
+  /** Returns the current execution ID for a workflow */
+  getCurrentExecutionId: (workflowId: string) => string | null
   /** Resets the entire store to its initial empty state */
   reset: () => void
   /** Stores a serializable execution snapshot for a workflow */
@@ -18,6 +18,7 @@ import {
 import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
 import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
 import {
+  COPILOT_AUTO_ALLOWED_TOOLS_API_PATH,
   COPILOT_CHAT_API_PATH,
   COPILOT_CHAT_STREAM_API_PATH,
   COPILOT_CHECKPOINTS_API_PATH,
@@ -83,15 +84,6 @@ function isPageUnloading(): boolean {
   return _isPageUnloading
 }

-function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
-  if (name === 'edit_workflow') return true
-  if (name !== 'workflow_change') return false
-
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
 function readActiveStreamFromStorage(): CopilotStreamInfo | null {
   if (typeof window === 'undefined') return null
   try {
@@ -148,6 +140,41 @@ function updateActiveStreamEventId(
   writeActiveStreamToStorage(next)
 }

+const AUTO_ALLOWED_TOOLS_STORAGE_KEY = 'copilot_auto_allowed_tools'
+
+function readAutoAllowedToolsFromStorage(): string[] | null {
+  if (typeof window === 'undefined') return null
+  try {
+    const raw = window.localStorage.getItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY)
+    if (!raw) return null
+    const parsed = JSON.parse(raw)
+    if (!Array.isArray(parsed)) return null
+    return parsed.filter((item): item is string => typeof item === 'string')
+  } catch (error) {
+    logger.warn('[AutoAllowedTools] Failed to read local cache', {
+      error: error instanceof Error ? error.message : String(error),
+    })
+    return null
+  }
+}
+
+function writeAutoAllowedToolsToStorage(tools: string[]): void {
+  if (typeof window === 'undefined') return
+  try {
+    window.localStorage.setItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY, JSON.stringify(tools))
+  } catch (error) {
+    logger.warn('[AutoAllowedTools] Failed to write local cache', {
+      error: error instanceof Error ? error.message : String(error),
+    })
+  }
+}
+
+function isToolAutoAllowedByList(toolId: string, autoAllowedTools: string[]): boolean {
+  if (!toolId) return false
+  const normalizedTarget = toolId.trim()
+  return autoAllowedTools.some((allowed) => allowed?.trim() === normalizedTarget)
+}
+
 /**
  * Clear any lingering diff preview from a previous session.
  * Called lazily when the store is first activated (setWorkflowId).
@@ -453,6 +480,11 @@ function prepareSendContext(
     .catch((err) => {
       logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
     })
+  get()
+    .loadAutoAllowedTools()
+    .catch((err) => {
+      logger.warn('[Copilot] Failed to load auto-allowed tools', err)
+    })

   let newMessages: CopilotMessage[]
   if (revertState) {
@@ -1005,6 +1037,8 @@ async function resumeFromLiveStream(
   return false
 }

+const cachedAutoAllowedTools = readAutoAllowedToolsFromStorage()
+
 // Initial state (subset required for UI/streaming)
 const initialState = {
   mode: 'build' as const,
@@ -1039,6 +1073,8 @@ const initialState = {
   streamingPlanContent: '',
   toolCallsById: {} as Record<string, CopilotToolCall>,
   suppressAutoSelect: false,
+  autoAllowedTools: cachedAutoAllowedTools ?? ([] as string[]),
+  autoAllowedToolsLoaded: cachedAutoAllowedTools !== null,
   activeStream: null as CopilotStreamInfo | null,
   messageQueue: [] as import('./types').QueuedMessage[],
   suppressAbortContinueOption: false,
@@ -1077,6 +1113,8 @@ export const useCopilotStore = create<CopilotStore>()(
         agentPrefetch: get().agentPrefetch,
         availableModels: get().availableModels,
         isLoadingModels: get().isLoadingModels,
+        autoAllowedTools: get().autoAllowedTools,
+        autoAllowedToolsLoaded: get().autoAllowedToolsLoaded,
       })
     },

@@ -1391,6 +1429,16 @@ export const useCopilotStore = create<CopilotStore>()(

     // Send a message (streaming only)
     sendMessage: async (message: string, options = {}) => {
+      if (!get().autoAllowedToolsLoaded) {
+        try {
+          await get().loadAutoAllowedTools()
+        } catch (error) {
+          logger.warn('[Copilot] Failed to preload auto-allowed tools before send', {
+            error: error instanceof Error ? error.message : String(error),
+          })
+        }
+      }
+
       const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
       if (!prepared) return

@@ -1657,7 +1705,7 @@ export const useCopilotStore = create<CopilotStore>()(
           const b = blocks[bi]
           if (b?.type === 'tool_call') {
             const tn = b.toolCall?.name
-            if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+            if (tn === 'edit_workflow') {
               id = b.toolCall?.id
               break outer
             }
@@ -1666,9 +1714,7 @@ export const useCopilotStore = create<CopilotStore>()(
       }
       // Fallback to map if not found in messages
       if (!id) {
-        const candidates = Object.values(toolCallsById).filter((t) =>
-          isWorkflowEditToolCall(t.name, t.params)
-        )
+        const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
         id = candidates.length ? candidates[candidates.length - 1].id : undefined
       }
     }
@@ -2361,6 +2407,74 @@ export const useCopilotStore = create<CopilotStore>()(
       }
     },

+    loadAutoAllowedTools: async () => {
+      try {
+        logger.debug('[AutoAllowedTools] Loading from API...')
+        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
+        logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
+        if (res.ok) {
+          const data = await res.json()
+          const tools = data.autoAllowedTools ?? []
+          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
+          writeAutoAllowedToolsToStorage(tools)
+          logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
+        } else {
+          set({ autoAllowedToolsLoaded: true })
+          logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
+        }
+      } catch (err) {
+        set({ autoAllowedToolsLoaded: true })
+        logger.error('[AutoAllowedTools] Failed to load', { error: err })
+      }
+    },
+
+    addAutoAllowedTool: async (toolId: string) => {
+      try {
+        logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
+        const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
+          method: 'POST',
+          headers: { 'Content-Type': 'application/json' },
+          body: JSON.stringify({ toolId }),
+        })
+        logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
+        if (res.ok) {
+          const data = await res.json()
+          logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
+          const tools = data.autoAllowedTools ?? []
+          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
+          writeAutoAllowedToolsToStorage(tools)
+          logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
+        }
+      } catch (err) {
+        logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
+      }
+    },
+
+    removeAutoAllowedTool: async (toolId: string) => {
+      try {
+        const res = await fetch(
+          `${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
+          {
+            method: 'DELETE',
+          }
+        )
+        if (res.ok) {
+          const data = await res.json()
+          const tools = data.autoAllowedTools ?? []
+          set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
+          writeAutoAllowedToolsToStorage(tools)
+          logger.debug('[AutoAllowedTools] Removed tool', { toolId })
+        }
+      } catch (err) {
+        logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
+      }
+    },
+
+    isToolAutoAllowed: (toolId: string) => {
+      const { autoAllowedTools } = get()
+      return isToolAutoAllowedByList(toolId, autoAllowedTools)
+    },
+
     // Credential masking
     loadSensitiveCredentialIds: async () => {
       try {
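
A hedged sketch of the auto-allow flow these store changes enable; the tool ID below is made up for illustration:

  // Illustrative only; 'slack_send_message' is a made-up tool ID.
  const copilot = useCopilotStore.getState()
  await copilot.loadAutoAllowedTools() // GET the list, then mirror it into localStorage
  if (!copilot.isToolAutoAllowed('slack_send_message')) {
    // ...prompt the user; if they choose "always allow":
    await copilot.addAutoAllowedTool('slack_send_message') // POST, then update store and cache
  }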
@@ -26,26 +26,6 @@ export interface CopilotToolCall {
   params?: Record<string, unknown>
   input?: Record<string, unknown>
   display?: ClientToolDisplay
-  /** Server-provided UI contract for this tool call phase */
-  ui?: {
-    title?: string
-    phaseLabel?: string
-    icon?: string
-    showInterrupt?: boolean
-    showRemember?: boolean
-    autoAllowed?: boolean
-    actions?: Array<{
-      id: string
-      label: string
-      kind: 'accept' | 'reject'
-      remember?: boolean
-    }>
-  }
-  /** Server-provided execution routing contract */
-  execution?: {
-    target?: 'go' | 'go_subagent' | 'sim_server' | 'sim_client_capability' | string
-    capabilityId?: string
-  }
   /** Content streamed from a subagent (e.g., debug agent) */
   subAgentContent?: string
   /** Tool calls made by the subagent */
@@ -187,6 +167,10 @@ export interface CopilotState {

   // Per-message metadata captured at send-time for reliable stats

+  // Auto-allowed integration tools (tools that can run without confirmation)
+  autoAllowedTools: string[]
+  autoAllowedToolsLoaded: boolean
+
   // Active stream metadata for reconnect/replay
   activeStream: CopilotStreamInfo | null

@@ -263,6 +247,11 @@ export interface CopilotActions {
     abortSignal?: AbortSignal
   ) => Promise<void>
   handleNewChatCreation: (newChatId: string) => Promise<void>
+  loadAutoAllowedTools: () => Promise<void>
+  addAutoAllowedTool: (toolId: string) => Promise<void>
+  removeAutoAllowedTool: (toolId: string) => Promise<void>
+  isToolAutoAllowed: (toolId: string) => boolean
+
   // Credential masking
   loadSensitiveCredentialIds: () => Promise<void>
   maskCredentialValue: (value: string) => string
@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(

         const newEntry = get().entries[0]

-        if (newEntry?.error) {
+        if (newEntry?.error && newEntry.blockType !== 'cancelled') {
           notifyBlockError({
             error: newEntry.error,
             blockName: newEntry.blockName || 'Unknown Block',
@@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
         useExecutionStore.getState().clearRunPath(workflowId)
       },

+      clearExecutionEntries: (executionId: string) =>
+        set((state) => ({
+          entries: state.entries.filter((e) => e.executionId !== executionId),
+        })),
+
       exportConsoleCSV: (workflowId: string) => {
         const entries = get().entries.filter((entry) => entry.workflowId === workflowId)

@@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
       },
       merge: (persistedState, currentState) => {
         const persisted = persistedState as Partial<ConsoleStore> | undefined
-        const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
+        const rawEntries = persisted?.entries ?? currentState.entries
+        const oneHourAgo = Date.now() - 60 * 60 * 1000
+
+        const entries = rawEntries.map((entry, index) => {
+          let updated = entry
           if (entry.executionOrder === undefined) {
-            return { ...entry, executionOrder: index + 1 }
+            updated = { ...updated, executionOrder: index + 1 }
           }
-          return entry
+          if (
+            entry.isRunning &&
+            entry.startedAt &&
+            new Date(entry.startedAt).getTime() < oneHourAgo
+          ) {
+            updated = { ...updated, isRunning: false }
+          }
+          return updated
         })
+
         return {
           ...currentState,
           entries,
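
For illustration only (timestamps made up): with the merge change above, a persisted entry that was still flagged as running more than an hour ago is rehydrated as finished.

  // Illustrative persisted entry; after merge it becomes { ...staleEntry, isRunning: false }.
  const staleEntry = {
    isRunning: true,
    startedAt: new Date(Date.now() - 2 * 60 * 60 * 1000).toISOString(), // two hours ago
  }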
@@ -51,6 +51,7 @@ export interface ConsoleStore {
   isOpen: boolean
   addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
   clearWorkflowConsole: (workflowId: string) => void
+  clearExecutionEntries: (executionId: string) => void
   exportConsoleCSV: (workflowId: string) => void
   getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
   toggleConsole: () => void
@@ -15,7 +15,7 @@ import {
   captureBaselineSnapshot,
   cloneWorkflowState,
   createBatchedUpdater,
-  findLatestWorkflowEditToolCallId,
+  findLatestEditWorkflowToolCallId,
   getLatestUserMessageId,
   persistWorkflowStateToServer,
 } from './utils'
@@ -334,7 +334,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
         })
       }

-      findLatestWorkflowEditToolCallId().then((toolCallId) => {
+      findLatestEditWorkflowToolCallId().then((toolCallId) => {
         if (toolCallId) {
           import('@/stores/panel/copilot/store')
             .then(({ useCopilotStore }) => {
@@ -439,7 +439,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
         })
       }

-      findLatestWorkflowEditToolCallId().then((toolCallId) => {
+      findLatestEditWorkflowToolCallId().then((toolCallId) => {
         if (toolCallId) {
           import('@/stores/panel/copilot/store')
             .then(({ useCopilotStore }) => {
@@ -126,21 +126,6 @@ export async function getLatestUserMessageId(): Promise<string | null> {
 }

 export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
-  return findLatestWorkflowEditToolCallId()
-}
-
-function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
-  if (name === 'edit_workflow') return true
-  if (name !== 'workflow_change') return false
-
-  const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-  if (mode === 'apply') return true
-
-  // Be permissive for legacy/incomplete events: apply calls always include proposalId.
-  return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
-export async function findLatestWorkflowEditToolCallId(): Promise<string | undefined> {
   try {
     const { useCopilotStore } = await import('@/stores/panel/copilot/store')
     const { messages, toolCallsById } = useCopilotStore.getState()
@@ -149,22 +134,17 @@ export async function findLatestWorkflowEditToolCallId(): Promise<string | undef
       const message = messages[mi]
       if (message.role !== 'assistant' || !message.contentBlocks) continue
       for (const block of message.contentBlocks) {
-        if (
-          block?.type === 'tool_call' &&
-          isWorkflowEditToolCall(block.toolCall?.name, block.toolCall?.params)
-        ) {
+        if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
           return block.toolCall?.id
         }
       }
     }

-    const fallback = Object.values(toolCallsById).filter((call) =>
-      isWorkflowEditToolCall(call.name, call.params)
-    )
+    const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')

     return fallback.length ? fallback[fallback.length - 1].id : undefined
   } catch (error) {
-    logger.warn('Failed to resolve workflow edit tool call id', { error })
+    logger.warn('Failed to resolve edit_workflow tool call id', { error })
     return undefined
   }
 }
@@ -1,6 +1,7 @@
 import {
   buildCanonicalIndex,
   type CanonicalIndex,
+  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
   getCanonicalValues,
   isCanonicalPair,
@@ -12,7 +13,10 @@ import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
 export {
   buildCanonicalIndex,
   type CanonicalIndex,
+  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
+  isCanonicalPair,
+  resolveCanonicalMode,
   type SubBlockCondition,
 }

@@ -1,13 +1,17 @@
 import { createLogger } from '@sim/logger'
 import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format'
 import {
+  buildCanonicalIndex,
+  type CanonicalModeOverrides,
   evaluateSubBlockCondition,
+  isCanonicalPair,
+  resolveCanonicalMode,
   type SubBlockCondition,
 } from '@/lib/workflows/subblocks/visibility'
-import type { SubBlockConfig as BlockSubBlockConfig } from '@/blocks/types'
+import type { SubBlockConfig as BlockSubBlockConfig, GenerationType } from '@/blocks/types'
 import { safeAssign } from '@/tools/safe-assign'
 import { isEmptyTagValue } from '@/tools/shared/tags'
-import type { ParameterVisibility, ToolConfig } from '@/tools/types'
+import type { OAuthConfig, ParameterVisibility, ToolConfig } from '@/tools/types'
 import { getTool } from '@/tools/utils'

 const logger = createLogger('ToolsParams')
@@ -64,6 +68,14 @@ export interface UIComponentConfig {
   mode?: 'basic' | 'advanced' | 'both' | 'trigger'
   /** The actual subblock ID this config was derived from */
   actualSubBlockId?: string
+  /** Wand configuration for AI assistance */
+  wandConfig?: {
+    enabled: boolean
+    prompt: string
+    generationType?: GenerationType
+    placeholder?: string
+    maintainHistory?: boolean
+  }
 }

 export interface SubBlockConfig {
@@ -327,6 +339,7 @@ export function getToolParametersConfig(
       canonicalParamId: subBlock.canonicalParamId,
       mode: subBlock.mode,
       actualSubBlockId: subBlock.id,
+      wandConfig: subBlock.wandConfig,
     }
   }
 }
@@ -812,3 +825,200 @@ export function formatParameterLabel(paramId: string): string {
   // Simple case - just capitalize first letter
   return paramId.charAt(0).toUpperCase() + paramId.slice(1)
 }
+
+/**
+ * SubBlock IDs that are "structural" — they control tool routing or auth,
+ * not user-facing parameters. These are excluded from tool-input rendering
+ * unless they have an explicit paramVisibility set.
+ */
+const STRUCTURAL_SUBBLOCK_IDS = new Set(['operation', 'authMethod', 'destinationType'])
+
+/**
+ * SubBlock types that represent auth/credential inputs handled separately
+ * by the tool-input OAuth credential selector.
+ */
+const AUTH_SUBBLOCK_TYPES = new Set(['oauth-input'])
+
+/**
+ * SubBlock types that should never appear in tool-input context.
+ */
+const EXCLUDED_SUBBLOCK_TYPES = new Set([
+  'tool-input',
+  'skill-input',
+  'condition-input',
+  'eval-input',
+  'webhook-config',
+  'schedule-info',
+  'trigger-save',
+  'input-format',
+  'response-format',
+  'mcp-server-selector',
+  'mcp-tool-selector',
+  'mcp-dynamic-args',
+  'input-mapping',
+  'variables-input',
+  'messages-input',
+  'router-input',
+  'text',
+])
+
+export interface SubBlocksForToolInput {
+  toolConfig: ToolConfig
+  subBlocks: BlockSubBlockConfig[]
+  oauthConfig?: OAuthConfig
+}
+
+/**
+ * Returns filtered SubBlockConfig[] for rendering in tool-input context.
+ * Uses subblock definitions as the primary source of UI metadata,
+ * getting all features (wandConfig, rich conditions, dependsOn, etc.) for free.
+ *
+ * For blocks without paramVisibility annotations, falls back to inferring
+ * visibility from the tool's param definitions.
+ */
+export function getSubBlocksForToolInput(
+  toolId: string,
+  blockType: string,
+  currentValues?: Record<string, unknown>,
+  canonicalModeOverrides?: CanonicalModeOverrides
+): SubBlocksForToolInput | null {
+  try {
+    const toolConfig = getTool(toolId)
+    if (!toolConfig) {
+      logger.warn(`Tool not found: ${toolId}`)
+      return null
+    }
+
+    const blockConfigs = getBlockConfigurations()
+    const blockConfig = blockConfigs[blockType]
+    if (!blockConfig?.subBlocks?.length) {
+      return null
+    }
+
+    const allSubBlocks = blockConfig.subBlocks as BlockSubBlockConfig[]
+    const canonicalIndex = buildCanonicalIndex(allSubBlocks)
+
+    // Build values for condition evaluation
+    const values = currentValues || {}
+    const valuesWithOperation = { ...values }
+    if (valuesWithOperation.operation === undefined) {
+      const parts = toolId.split('_')
+      valuesWithOperation.operation =
+        parts.length >= 3 ? parts.slice(2).join('_') : parts[parts.length - 1]
+    }
+
+    // Build a map of tool param IDs to their resolved visibility
+    const toolParamVisibility: Record<string, ParameterVisibility> = {}
+    for (const [paramId, param] of Object.entries(toolConfig.params || {})) {
+      toolParamVisibility[paramId] =
+        param.visibility ?? (param.required ? 'user-or-llm' : 'user-only')
+    }
+
+    // Track which canonical groups we've already included (to avoid duplicates)
+    const includedCanonicalIds = new Set<string>()
+
+    const filtered: BlockSubBlockConfig[] = []
+
+    for (const sb of allSubBlocks) {
+      // Skip excluded types
+      if (EXCLUDED_SUBBLOCK_TYPES.has(sb.type)) continue
+
+      // Skip trigger-mode-only subblocks
+      if (sb.mode === 'trigger') continue
+
+      // Determine the effective param ID (canonical or subblock id)
+      const effectiveParamId = sb.canonicalParamId || sb.id
+
+      // Resolve paramVisibility: explicit > inferred from tool params > skip
+      let visibility = sb.paramVisibility
+      if (!visibility) {
+        // Infer from structural checks
+        if (STRUCTURAL_SUBBLOCK_IDS.has(sb.id)) {
+          visibility = 'hidden'
+        } else if (AUTH_SUBBLOCK_TYPES.has(sb.type)) {
+          visibility = 'hidden'
+        } else if (
+          sb.password &&
+          (sb.id === 'botToken' || sb.id === 'accessToken' || sb.id === 'apiKey')
+        ) {
+          // Auth tokens without explicit paramVisibility are hidden
+          // (they're handled by the OAuth credential selector or structurally)
+          // But only if they don't have a matching tool param
+          if (!(sb.id in toolParamVisibility)) {
+            visibility = 'hidden'
+          } else {
+            visibility = toolParamVisibility[sb.id] || 'user-or-llm'
+          }
+        } else if (effectiveParamId in toolParamVisibility) {
+          // Fallback: infer from tool param visibility
+          visibility = toolParamVisibility[effectiveParamId]
+        } else if (sb.id in toolParamVisibility) {
+          visibility = toolParamVisibility[sb.id]
+        } else if (sb.canonicalParamId) {
+          // SubBlock has a canonicalParamId that doesn't directly match a tool param.
+          // This means the block's params() function transforms it before sending to the tool
+          // (e.g. listFolderId → folderId). These are user-facing inputs, default to user-or-llm.
+          visibility = 'user-or-llm'
+        } else {
+          // SubBlock has no corresponding tool param — skip it
+          continue
+        }
+      }

+      // Filter by visibility: exclude hidden and llm-only
+      if (visibility === 'hidden' || visibility === 'llm-only') continue
+
+      // Evaluate condition against current values
+      if (sb.condition) {
+        const conditionMet = evaluateSubBlockCondition(
+          sb.condition as SubBlockCondition,
+          valuesWithOperation
+        )
+        if (!conditionMet) continue
+      }
+
+      // Handle canonical pairs: only include the active mode variant
+      const canonicalId = canonicalIndex.canonicalIdBySubBlockId[sb.id]
+      if (canonicalId) {
+        const group = canonicalIndex.groupsById[canonicalId]
+        if (group && isCanonicalPair(group)) {
+          if (includedCanonicalIds.has(canonicalId)) continue
+          includedCanonicalIds.add(canonicalId)
+
+          // Determine active mode
+          const mode = resolveCanonicalMode(group, valuesWithOperation, canonicalModeOverrides)
+          if (mode === 'advanced') {
+            // Find the advanced variant
+            const advancedSb = allSubBlocks.find((s) => group.advancedIds.includes(s.id))
+            if (advancedSb) {
+              filtered.push({ ...advancedSb, paramVisibility: visibility })
+            }
+          } else {
+            // Include basic variant (current sb if it's the basic one)
+            if (group.basicId === sb.id) {
+              filtered.push({ ...sb, paramVisibility: visibility })
+            } else {
+              const basicSb = allSubBlocks.find((s) => s.id === group.basicId)
+              if (basicSb) {
+                filtered.push({ ...basicSb, paramVisibility: visibility })
+              }
+            }
+          }
+          continue
+        }
+      }
+
+      // Non-canonical, non-hidden, condition-passing subblock
+      filtered.push({ ...sb, paramVisibility: visibility })
+    }
+
+    return {
+      toolConfig,
+      subBlocks: filtered,
+      oauthConfig: toolConfig.oauth,
+    }
+  } catch (error) {
+    logger.error('Error getting subblocks for tool input:', error)
+    return null
+  }
+}
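
A rough usage sketch for the new helper; the tool and block identifiers below are assumptions for illustration, not registry values confirmed by this diff:

  // Hypothetical IDs; real values depend on the tool/block registries.
  const toolInput = getSubBlocksForToolInput('google_drive_list', 'google_drive', { operation: 'list' })
  if (toolInput) {
    // Only user-facing, condition-passing subblocks survive; structural and auth inputs are filtered out.
    for (const sb of toolInput.subBlocks) {
      console.log(sb.id, sb.paramVisibility)
    }
  }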
@@ -23,7 +23,12 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
  * with default values from the trigger definition on load, which aren't present in
  * the deployed state, causing false positive change detection.
  */
-export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
+export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
+  'webhookId',
+  'triggerPath',
+  'triggerConfig',
+  'triggerId',
+]

 /**
  * Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.