Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-06 04:35:03 -05:00)

Compare commits: main...feat/the-c (33 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2a675841ee | |
| | 7b84616a27 | |
| | a411d69f68 | |
| | e02fe94186 | |
| | 05ee6da190 | |
| | 5f89f625d7 | |
| | adf6b83d00 | |
| | f582c78220 | |
| | 54a5e06789 | |
| | ea22b1da4d | |
| | 57cba2ab1e | |
| | 53d436835a | |
| | 0729e37a6e | |
| | 378b19abdf | |
| | 99f920a8d1 | |
| | cb1922e033 | |
| | f3b1691997 | |
| | 60388f10ac | |
| | 16c187de56 | |
| | 84a1d5623f | |
| | 3631ba7223 | |
| | 45034802b7 | |
| | aab33485e2 | |
| | 52e39ab804 | |
| | 03e9da0941 | |
| | 0bb36362c6 | |
| | 0f66feef4a | |
| | c31e54f442 | |
| | 5c9ebc40e5 | |
| | a4cbdda235 | |
| | 045d68e3b3 | |
| | d8c827eed1 | |
| | b62f599252 | |
```diff
@@ -1,7 +1,7 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
-import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
+import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
 import { env } from '@/lib/core/config/env'
 
 const GenerateApiKeySchema = z.object({
@@ -17,9 +17,6 @@ export async function POST(req: NextRequest) {
 
   const userId = session.user.id
 
-  // Move environment variable access inside the function
-  const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
-
   const body = await req.json().catch(() => ({}))
   const validationResult = GenerateApiKeySchema.safeParse(body)
 
```
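The pattern here (and in the similar hunks below) replaces per-handler env lookups with a shared constant. A minimal sketch of what the updated `@/lib/copilot/constants` module presumably exports after this change; the default URL value is a placeholder, not the real one:

```ts
// Sketch only: assumed shape of '@/lib/copilot/constants' after this change.
// The resolved agent URL is computed once at module scope, so route handlers
// can import SIM_AGENT_API_URL directly instead of re-deriving it per request.
import { env } from '@/lib/core/config/env'

export const SIM_AGENT_API_URL_DEFAULT = 'http://localhost:8000' // placeholder default
export const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
```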
```diff
@@ -1,6 +1,6 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
-import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
+import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
 import { env } from '@/lib/core/config/env'
 
 export async function GET(request: NextRequest) {
@@ -12,8 +12,6 @@ export async function GET(request: NextRequest) {
 
   const userId = session.user.id
 
-  const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
-
   const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
     method: 'POST',
     headers: {
@@ -68,8 +66,6 @@ export async function DELETE(request: NextRequest) {
     return NextResponse.json({ error: 'id is required' }, { status: 400 })
   }
 
-  const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
-
   const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
     method: 'POST',
     headers: {
```
(One file's diff is suppressed because it is too large and is not shown here.)

New file (130 lines): apps/sim/app/api/copilot/chat/stream/route.ts
```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import {
  getStreamMeta,
  readStreamEvents,
  type StreamMeta,
} from '@/lib/copilot/orchestrator/stream-buffer'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { SSE_HEADERS } from '@/lib/core/utils/sse'

const logger = createLogger('CopilotChatStreamAPI')
const POLL_INTERVAL_MS = 250
const MAX_STREAM_MS = 10 * 60 * 1000

function encodeEvent(event: Record<string, any>): Uint8Array {
  return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
}

export async function GET(request: NextRequest) {
  const { userId: authenticatedUserId, isAuthenticated } =
    await authenticateCopilotRequestSessionOnly()

  if (!isAuthenticated || !authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const url = new URL(request.url)
  const streamId = url.searchParams.get('streamId') || ''
  const fromParam = url.searchParams.get('from') || '0'
  const fromEventId = Number(fromParam || 0)
  // If batch=true, return buffered events as JSON instead of SSE
  const batchMode = url.searchParams.get('batch') === 'true'
  const toParam = url.searchParams.get('to')
  const toEventId = toParam ? Number(toParam) : undefined

  if (!streamId) {
    return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
  }

  const meta = (await getStreamMeta(streamId)) as StreamMeta | null
  logger.info('[Resume] Stream lookup', {
    streamId,
    fromEventId,
    toEventId,
    batchMode,
    hasMeta: !!meta,
    metaStatus: meta?.status,
  })
  if (!meta) {
    return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
  }
  if (meta.userId && meta.userId !== authenticatedUserId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
  }

  // Batch mode: return all buffered events as JSON
  if (batchMode) {
    const events = await readStreamEvents(streamId, fromEventId)
    const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
    logger.info('[Resume] Batch response', {
      streamId,
      fromEventId,
      toEventId,
      eventCount: filteredEvents.length,
    })
    return NextResponse.json({
      success: true,
      events: filteredEvents,
      status: meta.status,
    })
  }

  const startTime = Date.now()

  const stream = new ReadableStream({
    async start(controller) {
      let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0

      const flushEvents = async () => {
        const events = await readStreamEvents(streamId, lastEventId)
        if (events.length > 0) {
          logger.info('[Resume] Flushing events', {
            streamId,
            fromEventId: lastEventId,
            eventCount: events.length,
          })
        }
        for (const entry of events) {
          lastEventId = entry.eventId
          const payload = {
            ...entry.event,
            eventId: entry.eventId,
            streamId: entry.streamId,
          }
          controller.enqueue(encodeEvent(payload))
        }
      }

      try {
        await flushEvents()

        while (Date.now() - startTime < MAX_STREAM_MS) {
          const currentMeta = await getStreamMeta(streamId)
          if (!currentMeta) break

          await flushEvents()

          if (currentMeta.status === 'complete' || currentMeta.status === 'error') {
            break
          }

          if (request.signal.aborted) {
            break
          }

          await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
        }
      } catch (error) {
        logger.warn('Stream replay failed', {
          streamId,
          error: error instanceof Error ? error.message : String(error),
        })
      } finally {
        controller.close()
      }
    },
  })

  return new Response(stream, { headers: SSE_HEADERS })
}
```
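For orientation, a client could resume a buffered copilot stream through this route either as SSE or in batch mode. This is an illustrative sketch only; the `streamId` comes from an earlier chat response, and the event shape mirrors the payload built in `flushEvents()` above.

```ts
// Hypothetical client-side usage of GET /api/copilot/chat/stream.
async function resumeStreamBatch(streamId: string, fromEventId = 0) {
  // batch=true returns buffered events as JSON instead of an SSE stream
  const res = await fetch(
    `/api/copilot/chat/stream?streamId=${encodeURIComponent(streamId)}&from=${fromEventId}&batch=true`
  )
  if (!res.ok) throw new Error(`Resume failed: ${res.status}`)
  // Shape per the handler above: { success, events: [{ eventId, streamId, ... }], status }
  return res.json()
}

function resumeStreamSSE(streamId: string, onEvent: (event: unknown) => void) {
  // Without batch=true the route replays events as Server-Sent Events
  const source = new EventSource(
    `/api/copilot/chat/stream?streamId=${encodeURIComponent(streamId)}`
  )
  source.onmessage = (msg) => onEvent(JSON.parse(msg.data))
  return source
}
```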
```diff
@@ -1,6 +1,7 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
+import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -23,7 +24,8 @@ const ConfirmationSchema = z.object({
 })
 
 /**
- * Update tool call status in Redis
+ * Write the user's tool decision to Redis. The server-side orchestrator's
+ * waitForToolDecision() polls Redis for this value.
  */
 async function updateToolCallStatus(
   toolCallId: string,
@@ -32,57 +34,24 @@ async function updateToolCallStatus(
 ): Promise<boolean> {
   const redis = getRedisClient()
   if (!redis) {
-    logger.warn('updateToolCallStatus: Redis client not available')
+    logger.warn('Redis client not available for tool confirmation')
     return false
   }
 
   try {
-    const key = `tool_call:${toolCallId}`
-    const timeout = 600000 // 10 minutes timeout for user confirmation
-    const pollInterval = 100 // Poll every 100ms
-    const startTime = Date.now()
-
-    logger.info('Polling for tool call in Redis', { toolCallId, key, timeout })
-
-    // Poll until the key exists or timeout
-    while (Date.now() - startTime < timeout) {
-      const exists = await redis.exists(key)
-      if (exists) {
-        break
-      }
-
-      // Wait before next poll
-      await new Promise((resolve) => setTimeout(resolve, pollInterval))
-    }
-
-    // Final check if key exists after polling
-    const exists = await redis.exists(key)
-    if (!exists) {
-      logger.warn('Tool call not found in Redis after polling timeout', {
-        toolCallId,
-        key,
-        timeout,
-        pollDuration: Date.now() - startTime,
-      })
-      return false
-    }
-
-    // Store both status and message as JSON
-    const toolCallData = {
+    const key = `${REDIS_TOOL_CALL_PREFIX}${toolCallId}`
+    const payload = {
       status,
       message: message || null,
      timestamp: new Date().toISOString(),
     }
-    await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
-
+    await redis.set(key, JSON.stringify(payload), 'EX', REDIS_TOOL_CALL_TTL_SECONDS)
     return true
   } catch (error) {
-    logger.error('Failed to update tool call status in Redis', {
+    logger.error('Failed to update tool call status', {
       toolCallId,
       status,
-      message,
-      error: error instanceof Error ? error.message : 'Unknown error',
+      error: error instanceof Error ? error.message : String(error),
     })
     return false
   }
 }
```
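The doc comment above names the other half of this handshake. A minimal sketch of what that polling might look like on the orchestrator side, assuming the same key prefix; the actual `waitForToolDecision()` implementation is not shown in this diff:

```ts
// Assumed counterpart to updateToolCallStatus(): poll Redis until the user's
// decision record appears or a deadline passes. Placeholder prefix; the real
// value is REDIS_TOOL_CALL_PREFIX from '@/lib/copilot/constants'.
const TOOL_CALL_PREFIX = 'tool-call:'

async function waitForToolDecision(
  redis: { get(key: string): Promise<string | null> },
  toolCallId: string,
  timeoutMs = 10 * 60 * 1000,
  pollMs = 250
): Promise<{ status: string; message: string | null; timestamp: string } | null> {
  const key = `${TOOL_CALL_PREFIX}${toolCallId}`
  const deadline = Date.now() + timeoutMs
  while (Date.now() < deadline) {
    const raw = await redis.get(key)
    if (raw) return JSON.parse(raw) // { status, message, timestamp } as written above
    await new Promise((resolve) => setTimeout(resolve, pollMs))
  }
  return null // no decision arrived before the timeout
}
```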
New file (28 lines): apps/sim/app/api/copilot/credentials/route.ts
```ts
import { type NextRequest, NextResponse } from 'next/server'
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
import { routeExecution } from '@/lib/copilot/tools/server/router'

/**
 * GET /api/copilot/credentials
 * Returns connected OAuth credentials for the authenticated user.
 * Used by the copilot store for credential masking.
 */
export async function GET(_req: NextRequest) {
  const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
  if (!isAuthenticated || !userId) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  try {
    const result = await routeExecution('get_credentials', {}, { userId })
    return NextResponse.json({ success: true, result })
  } catch (error) {
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to load credentials',
      },
      { status: 500 }
    )
  }
}
```
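Usage is a plain authenticated GET; the shape of `result` depends on the `get_credentials` server tool and is not specified in this diff:

```ts
// Illustrative client call to the new endpoint.
const res = await fetch('/api/copilot/credentials')
if (res.ok) {
  const { success, result } = await res.json()
  // `result` holds the connected OAuth credentials the copilot store uses for masking
}
```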
Deleted file (54 lines), the execute-copilot-server-tool route:

```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import {
  authenticateCopilotRequestSessionOnly,
  createBadRequestResponse,
  createInternalServerErrorResponse,
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { routeExecution } from '@/lib/copilot/tools/server/router'

const logger = createLogger('ExecuteCopilotServerToolAPI')

const ExecuteSchema = z.object({
  toolName: z.string(),
  payload: z.unknown().optional(),
})

export async function POST(req: NextRequest) {
  const tracker = createRequestTracker()
  try {
    const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
    if (!isAuthenticated || !userId) {
      return createUnauthorizedResponse()
    }

    const body = await req.json()
    try {
      const preview = JSON.stringify(body).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
    } catch {}

    const { toolName, payload } = ExecuteSchema.parse(body)

    logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
    const result = await routeExecution(toolName, payload, { userId })

    try {
      const resultPreview = JSON.stringify(result).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
    } catch {}

    return NextResponse.json({ success: true, result })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
      return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
    }
    logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
    const errorMessage = error instanceof Error ? error.message : 'Failed to execute server tool'
    return createInternalServerErrorResponse(errorMessage)
  }
}
```
Deleted file (247 lines), the copilot execute-tool route:

```ts
import { db } from '@sim/db'
import { account, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import {
  createBadRequestResponse,
  createInternalServerErrorResponse,
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { getTool, resolveToolId } from '@/tools/utils'

const logger = createLogger('CopilotExecuteToolAPI')

const ExecuteToolSchema = z.object({
  toolCallId: z.string(),
  toolName: z.string(),
  arguments: z.record(z.any()).optional().default({}),
  workflowId: z.string().optional(),
})

export async function POST(req: NextRequest) {
  const tracker = createRequestTracker()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return createUnauthorizedResponse()
    }

    const userId = session.user.id
    const body = await req.json()

    try {
      const preview = JSON.stringify(body).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Incoming execute-tool request`, { preview })
    } catch {}

    const { toolCallId, toolName, arguments: toolArgs, workflowId } = ExecuteToolSchema.parse(body)

    const resolvedToolName = resolveToolId(toolName)

    logger.info(`[${tracker.requestId}] Executing tool`, {
      toolCallId,
      toolName,
      resolvedToolName,
      workflowId,
      hasArgs: Object.keys(toolArgs).length > 0,
    })

    const toolConfig = getTool(resolvedToolName)
    if (!toolConfig) {
      // Find similar tool names to help debug
      const { tools: allTools } = await import('@/tools/registry')
      const allToolNames = Object.keys(allTools)
      const prefix = toolName.split('_').slice(0, 2).join('_')
      const similarTools = allToolNames
        .filter((name) => name.startsWith(`${prefix.split('_')[0]}_`))
        .slice(0, 10)

      logger.warn(`[${tracker.requestId}] Tool not found in registry`, {
        toolName,
        prefix,
        similarTools,
        totalToolsInRegistry: allToolNames.length,
      })
      return NextResponse.json(
        {
          success: false,
          error: `Tool not found: ${toolName}. Similar tools: ${similarTools.join(', ')}`,
          toolCallId,
        },
        { status: 404 }
      )
    }

    // Get the workspaceId from the workflow (env vars are stored at workspace level)
    let workspaceId: string | undefined
    if (workflowId) {
      const workflowResult = await db
        .select({ workspaceId: workflow.workspaceId })
        .from(workflow)
        .where(eq(workflow.id, workflowId))
        .limit(1)
      workspaceId = workflowResult[0]?.workspaceId ?? undefined
    }

    // Get decrypted environment variables early so we can resolve all {{VAR}} references
    const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)

    logger.info(`[${tracker.requestId}] Fetched environment variables`, {
      workflowId,
      workspaceId,
      envVarCount: Object.keys(decryptedEnvVars).length,
      envVarKeys: Object.keys(decryptedEnvVars),
    })

    // Build execution params starting with LLM-provided arguments
    // Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
    const executionParams: Record<string, any> = resolveEnvVarReferences(
      toolArgs,
      decryptedEnvVars,
      { deep: true }
    ) as Record<string, any>

    logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
      toolName,
      originalArgKeys: Object.keys(toolArgs),
      resolvedArgKeys: Object.keys(executionParams),
    })

    // Resolve OAuth access token if required
    if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
      const provider = toolConfig.oauth.provider
      logger.info(`[${tracker.requestId}] Resolving OAuth token`, { provider })

      try {
        // Find the account for this provider and user
        const accounts = await db
          .select()
          .from(account)
          .where(and(eq(account.providerId, provider), eq(account.userId, userId)))
          .limit(1)

        if (accounts.length > 0) {
          const acc = accounts[0]
          const requestId = generateRequestId()
          const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id)

          if (accessToken) {
            executionParams.accessToken = accessToken
            logger.info(`[${tracker.requestId}] OAuth token resolved`, { provider })
          } else {
            logger.warn(`[${tracker.requestId}] No access token available`, { provider })
            return NextResponse.json(
              {
                success: false,
                error: `OAuth token not available for ${provider}. Please reconnect your account.`,
                toolCallId,
              },
              { status: 400 }
            )
          }
        } else {
          logger.warn(`[${tracker.requestId}] No account found for provider`, { provider })
          return NextResponse.json(
            {
              success: false,
              error: `No ${provider} account connected. Please connect your account first.`,
              toolCallId,
            },
            { status: 400 }
          )
        }
      } catch (error) {
        logger.error(`[${tracker.requestId}] Failed to resolve OAuth token`, {
          provider,
          error: error instanceof Error ? error.message : String(error),
        })
        return NextResponse.json(
          {
            success: false,
            error: `Failed to get OAuth token for ${provider}`,
            toolCallId,
          },
          { status: 500 }
        )
      }
    }

    // Check if tool requires an API key that wasn't resolved via {{ENV_VAR}} reference
    const needsApiKey = toolConfig.params?.apiKey?.required

    if (needsApiKey && !executionParams.apiKey) {
      logger.warn(`[${tracker.requestId}] No API key found for tool`, { toolName })
      return NextResponse.json(
        {
          success: false,
          error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
          toolCallId,
        },
        { status: 400 }
      )
    }

    // Add execution context
    executionParams._context = {
      workflowId,
      userId,
    }

    // Special handling for function_execute - inject environment variables
    if (toolName === 'function_execute') {
      executionParams.envVars = decryptedEnvVars
      executionParams.workflowVariables = {} // No workflow variables in copilot context
      executionParams.blockData = {} // No block data in copilot context
      executionParams.blockNameMapping = {} // No block mapping in copilot context
      executionParams.language = executionParams.language || 'javascript'
      executionParams.timeout = executionParams.timeout || 30000

      logger.info(`[${tracker.requestId}] Injected env vars for function_execute`, {
        envVarCount: Object.keys(decryptedEnvVars).length,
      })
    }

    // Execute the tool
    logger.info(`[${tracker.requestId}] Executing tool with resolved credentials`, {
      toolName,
      hasAccessToken: !!executionParams.accessToken,
      hasApiKey: !!executionParams.apiKey,
    })

    const result = await executeTool(resolvedToolName, executionParams)

    logger.info(`[${tracker.requestId}] Tool execution complete`, {
      toolName,
      success: result.success,
      hasOutput: !!result.output,
    })

    return NextResponse.json({
      success: true,
      toolCallId,
      result: {
        success: result.success,
        output: result.output,
        error: result.error,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
      return createBadRequestResponse('Invalid request body for execute-tool')
    }
    logger.error(`[${tracker.requestId}] Failed to execute tool:`, error)
    const errorMessage = error instanceof Error ? error.message : 'Failed to execute tool'
    return createInternalServerErrorResponse(errorMessage)
  }
}
```
```diff
@@ -1,6 +1,6 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
-import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
+import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -10,8 +10,6 @@ import {
 } from '@/lib/copilot/request-helpers'
 import { env } from '@/lib/core/config/env'
 
-const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
-
 const BodySchema = z.object({
   messageId: z.string(),
   diffCreated: z.boolean(),
```
Deleted file (123 lines), the copilot mark-complete proxy route:

```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
import {
  authenticateCopilotRequestSessionOnly,
  createBadRequestResponse,
  createInternalServerErrorResponse,
  createRequestTracker,
  createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { env } from '@/lib/core/config/env'

const logger = createLogger('CopilotMarkToolCompleteAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const MarkCompleteSchema = z.object({
  id: z.string(),
  name: z.string(),
  status: z.number().int(),
  message: z.any().optional(),
  data: z.any().optional(),
})

/**
 * POST /api/copilot/tools/mark-complete
 * Proxy to Sim Agent: POST /api/tools/mark-complete
 */
export async function POST(req: NextRequest) {
  const tracker = createRequestTracker()

  try {
    const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
    if (!isAuthenticated || !userId) {
      return createUnauthorizedResponse()
    }

    const body = await req.json()

    // Log raw body shape for diagnostics (avoid dumping huge payloads)
    try {
      const bodyPreview = JSON.stringify(body).slice(0, 300)
      logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
        preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
      })
    } catch {}

    const parsed = MarkCompleteSchema.parse(body)

    const messagePreview = (() => {
      try {
        const s =
          typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
        return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
      } catch {
        return undefined
      }
    })()

    logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
      userId,
      toolCallId: parsed.id,
      toolName: parsed.name,
      status: parsed.status,
      hasMessage: parsed.message !== undefined,
      hasData: parsed.data !== undefined,
      messagePreview,
      agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
    })

    const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
      },
      body: JSON.stringify(parsed),
    })

    // Attempt to parse agent response JSON
    let agentJson: any = null
    let agentText: string | null = null
    try {
      agentJson = await agentRes.json()
    } catch (_) {
      try {
        agentText = await agentRes.text()
      } catch {}
    }

    logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
      status: agentRes.status,
      ok: agentRes.ok,
      responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
      responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
    })

    if (agentRes.ok) {
      return NextResponse.json({ success: true })
    }

    const errorMessage =
      agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
    const status = agentRes.status >= 500 ? 500 : 400

    logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
      status,
      error: errorMessage,
    })

    return NextResponse.json({ success: false, error: errorMessage }, { status })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
        issues: error.issues,
      })
      return createBadRequestResponse('Invalid request body for mark-complete')
    }
    logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
    return createInternalServerErrorResponse('Failed to mark tool as complete')
  }
}
```
New file (394 lines): apps/sim/app/api/mcp/copilot/route.ts
```ts
import {
  type CallToolResult,
  ErrorCode,
  type InitializeResult,
  isJSONRPCNotification,
  isJSONRPCRequest,
  type JSONRPCError,
  type JSONRPCMessage,
  type JSONRPCResponse,
  type ListToolsResult,
  type RequestId,
} from '@modelcontextprotocol/sdk/types.js'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
import {
  executeToolServerSide,
  prepareExecutionContext,
} from '@/lib/copilot/orchestrator/tool-executor'
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'

const logger = createLogger('CopilotMcpAPI')

export const dynamic = 'force-dynamic'

/**
 * MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
 * This is included in the initialize response to help external LLMs understand
 * the workflow lifecycle and best practices.
 */
const MCP_SERVER_INSTRUCTIONS = `
## Sim Workflow Copilot

Sim is a workflow automation platform. Workflows are visual pipelines of connected blocks (Agent, Function, Condition, API, integrations, etc.). The Agent block is the core — an LLM with tools, memory, structured output, and knowledge bases.

### Workflow Lifecycle (Happy Path)

1. \`list_workspaces\` → know where to work
2. \`create_workflow(name, workspaceId)\` → get a workflowId
3. \`copilot_build(request, workflowId)\` → plan and build in one pass
4. \`copilot_test(request, workflowId)\` → verify it works
5. \`copilot_deploy("deploy as api", workflowId)\` → make it accessible externally (optional)

For fine-grained control, use \`copilot_plan\` → \`copilot_edit\` instead of \`copilot_build\`. Pass the plan object from copilot_plan EXACTLY as-is to copilot_edit's context.plan field.

### Working with Existing Workflows

When the user refers to a workflow by name or description ("the email one", "my Slack bot"):
1. Use \`copilot_discovery\` to find it by functionality
2. Or use \`list_workflows\` and match by name
3. Then pass the workflowId to other tools

### Organization

- \`rename_workflow\` — rename a workflow
- \`move_workflow\` — move a workflow into a folder (or root with null)
- \`move_folder\` — nest a folder inside another (or root with null)
- \`create_folder(name, parentId)\` — create nested folder hierarchies

### Key Rules

- You can test workflows immediately after building — deployment is only needed for external access (API, chat, MCP).
- All copilot tools (build, plan, edit, deploy, test, debug) require workflowId.
- If the user reports errors → use \`copilot_debug\` first, don't guess.
- Variable syntax: \`<blockname.field>\` for block outputs, \`{{ENV_VAR}}\` for env vars.
`

function createResponse(id: RequestId, result: unknown): JSONRPCResponse {
  return {
    jsonrpc: '2.0',
    id,
    result: result as JSONRPCResponse['result'],
  }
}

function createError(id: RequestId, code: ErrorCode | number, message: string): JSONRPCError {
  return {
    jsonrpc: '2.0',
    id,
    error: { code, message },
  }
}

export async function GET() {
  return NextResponse.json({
    name: 'copilot-subagents',
    version: '1.0.0',
    protocolVersion: '2024-11-05',
    capabilities: { tools: {} },
  })
}

export async function POST(request: NextRequest) {
  try {
    const auth = await checkHybridAuth(request, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const body = (await request.json()) as JSONRPCMessage

    if (isJSONRPCNotification(body)) {
      return new NextResponse(null, { status: 202 })
    }

    if (!isJSONRPCRequest(body)) {
      return NextResponse.json(
        createError(0, ErrorCode.InvalidRequest, 'Invalid JSON-RPC message'),
        { status: 400 }
      )
    }

    const { id, method, params } = body

    switch (method) {
      case 'initialize': {
        const result: InitializeResult = {
          protocolVersion: '2024-11-05',
          capabilities: { tools: {} },
          serverInfo: { name: 'sim-copilot', version: '1.0.0' },
          instructions: MCP_SERVER_INSTRUCTIONS,
        }
        return NextResponse.json(createResponse(id, result))
      }
      case 'ping':
        return NextResponse.json(createResponse(id, {}))
      case 'tools/list':
        return handleToolsList(id)
      case 'tools/call':
        return handleToolsCall(
          id,
          params as { name: string; arguments?: Record<string, unknown> },
          auth.userId
        )
      default:
        return NextResponse.json(
          createError(id, ErrorCode.MethodNotFound, `Method not found: ${method}`),
          { status: 404 }
        )
    }
  } catch (error) {
    logger.error('Error handling MCP request', { error })
    return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), {
      status: 500,
    })
  }
}

async function handleToolsList(id: RequestId): Promise<NextResponse> {
  const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
    name: tool.name,
    description: tool.description,
    inputSchema: tool.inputSchema,
  }))

  const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
    name: tool.name,
    description: tool.description,
    inputSchema: tool.inputSchema,
  }))

  const result: ListToolsResult = {
    tools: [...directTools, ...subagentTools],
  }

  return NextResponse.json(createResponse(id, result))
}

async function handleToolsCall(
  id: RequestId,
  params: { name: string; arguments?: Record<string, unknown> },
  userId: string
): Promise<NextResponse> {
  const args = params.arguments || {}

  // Check if this is a direct tool (fast, no LLM)
  const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
  if (directTool) {
    return handleDirectToolCall(id, directTool, args, userId)
  }

  // Check if this is a subagent tool (uses LLM orchestration)
  const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
  if (subagentTool) {
    return handleSubagentToolCall(id, subagentTool, args, userId)
  }

  return NextResponse.json(
    createError(id, ErrorCode.MethodNotFound, `Tool not found: ${params.name}`),
    { status: 404 }
  )
}

async function handleDirectToolCall(
  id: RequestId,
  toolDef: (typeof DIRECT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string
): Promise<NextResponse> {
  try {
    const execContext = await prepareExecutionContext(userId, (args.workflowId as string) || '')

    const toolCall = {
      id: crypto.randomUUID(),
      name: toolDef.toolId,
      status: 'pending' as const,
      params: args as Record<string, any>,
      startTime: Date.now(),
    }

    const result = await executeToolServerSide(toolCall, execContext)

    const response: CallToolResult = {
      content: [
        {
          type: 'text',
          text: JSON.stringify(result.output ?? result, null, 2),
        },
      ],
      isError: !result.success,
    }

    return NextResponse.json(createResponse(id, response))
  } catch (error) {
    logger.error('Direct tool execution failed', { tool: toolDef.name, error })
    return NextResponse.json(
      createError(id, ErrorCode.InternalError, `Tool execution failed: ${error}`),
      { status: 500 }
    )
  }
}

/**
 * Build mode uses the main chat orchestrator with the 'fast' command instead of
 * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode
 * (ModeFast) on the main chat processor that bypasses subagent orchestration and
 * executes all tools directly.
 */
async function handleBuildToolCall(
  id: RequestId,
  args: Record<string, unknown>,
  userId: string
): Promise<NextResponse> {
  try {
    const requestText = (args.request as string) || JSON.stringify(args)
    const { model } = getCopilotModel('chat')
    const workflowId = args.workflowId as string | undefined

    const resolved = workflowId ? { workflowId } : await resolveWorkflowIdForUser(userId)

    if (!resolved?.workflowId) {
      const response: CallToolResult = {
        content: [
          {
            type: 'text',
            text: JSON.stringify(
              {
                success: false,
                error: 'workflowId is required for build. Call create_workflow first.',
              },
              null,
              2
            ),
          },
        ],
        isError: true,
      }
      return NextResponse.json(createResponse(id, response))
    }

    const chatId = crypto.randomUUID()

    const requestPayload = {
      message: requestText,
      workflowId: resolved.workflowId,
      userId,
      model,
      mode: 'agent',
      commands: ['fast'],
      messageId: crypto.randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true,
      chatId,
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId,
      workflowId: resolved.workflowId,
      chatId,
      autoExecuteTools: true,
      timeout: 300000,
      interactive: false,
    })

    const responseData = {
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      error: result.error,
    }

    const response: CallToolResult = {
      content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }],
      isError: !result.success,
    }

    return NextResponse.json(createResponse(id, response))
  } catch (error) {
    logger.error('Build tool call failed', { error })
    return NextResponse.json(createError(id, ErrorCode.InternalError, `Build failed: ${error}`), {
      status: 500,
    })
  }
}

async function handleSubagentToolCall(
  id: RequestId,
  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string
): Promise<NextResponse> {
  // Build mode uses the main chat endpoint, not the subagent endpoint
  if (toolDef.agentId === 'build') {
    return handleBuildToolCall(id, args, userId)
  }

  const requestText =
    (args.request as string) ||
    (args.message as string) ||
    (args.error as string) ||
    JSON.stringify(args)

  const context = (args.context as Record<string, unknown>) || {}
  if (args.plan && !context.plan) {
    context.plan = args.plan
  }

  const { model } = getCopilotModel('chat')

  const result = await orchestrateSubagentStream(
    toolDef.agentId,
    {
      message: requestText,
      workflowId: args.workflowId,
      workspaceId: args.workspaceId,
      context,
      model,
      headless: true,
    },
    {
      userId,
      workflowId: args.workflowId as string | undefined,
      workspaceId: args.workspaceId as string | undefined,
    }
  )

  let responseData: unknown
  if (result.structuredResult) {
    responseData = {
      success: result.structuredResult.success ?? result.success,
      type: result.structuredResult.type,
      summary: result.structuredResult.summary,
      data: result.structuredResult.data,
    }
  } else if (result.error) {
    responseData = {
      success: false,
      error: result.error,
      errors: result.errors,
    }
  } else {
    responseData = {
      success: result.success,
      content: result.content,
    }
  }

  const response: CallToolResult = {
    content: [
      {
        type: 'text',
        text: JSON.stringify(responseData, null, 2),
      },
    ],
    isError: !result.success,
  }

  return NextResponse.json(createResponse(id, response))
}
```
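For orientation, an MCP client talks to this route with JSON-RPC over POST. The requests below are a sketch: the tool name `copilot_build` appears in the server instructions above, but the exact argument schema comes from DIRECT_TOOL_DEFS/SUBAGENT_TOOL_DEFS and is assumed here.

```ts
// Hypothetical JSON-RPC payloads for POST /api/mcp/copilot (auth headers omitted).
const listTools = {
  jsonrpc: '2.0',
  id: 1,
  method: 'tools/list',
  params: {},
}

const callBuild = {
  jsonrpc: '2.0',
  id: 2,
  method: 'tools/call',
  params: {
    name: 'copilot_build', // tool name taken from the server instructions above
    arguments: {
      request: 'Build a workflow that posts new GitHub issues to Slack', // example text
      workflowId: '<workflow-id>', // from a prior create_workflow call
    },
  },
}
// The handler answers with a JSON-RPC response whose result is a CallToolResult:
// { content: [{ type: 'text', text: '...' }], isError: false }
```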
New file (114 lines): apps/sim/app/api/v1/copilot/chat/route.ts
```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getCopilotModel } from '@/lib/copilot/config'
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
import { authenticateV1Request } from '@/app/api/v1/auth'

const logger = createLogger('CopilotHeadlessAPI')

const RequestSchema = z.object({
  message: z.string().min(1, 'message is required'),
  workflowId: z.string().optional(),
  workflowName: z.string().optional(),
  chatId: z.string().optional(),
  mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
  model: z.string().optional(),
  autoExecuteTools: z.boolean().optional().default(true),
  timeout: z.number().optional().default(300000),
})

/**
 * POST /api/v1/copilot/chat
 * Headless copilot endpoint for server-side orchestration.
 *
 * workflowId is optional - if not provided:
 * - If workflowName is provided, finds that workflow
 * - Otherwise uses the user's first workflow as context
 * - The copilot can still operate on any workflow using list_user_workflows
 */
export async function POST(req: NextRequest) {
  const auth = await authenticateV1Request(req)
  if (!auth.authenticated || !auth.userId) {
    return NextResponse.json(
      { success: false, error: auth.error || 'Unauthorized' },
      { status: 401 }
    )
  }

  try {
    const body = await req.json()
    const parsed = RequestSchema.parse(body)
    const defaults = getCopilotModel('chat')
    const selectedModel = parsed.model || defaults.model

    // Resolve workflow ID
    const resolved = await resolveWorkflowIdForUser(
      auth.userId,
      parsed.workflowId,
      parsed.workflowName
    )
    if (!resolved) {
      return NextResponse.json(
        {
          success: false,
          error: 'No workflows found. Create a workflow first or provide a valid workflowId.',
        },
        { status: 400 }
      )
    }

    // Transform mode to transport mode (same as client API)
    // build and agent both map to 'agent' on the backend
    const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode
    const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode

    // Always generate a chatId - required for artifacts system to work with subagents
    const chatId = parsed.chatId || crypto.randomUUID()

    const requestPayload = {
      message: parsed.message,
      workflowId: resolved.workflowId,
      userId: auth.userId,
      model: selectedModel,
      mode: transportMode,
      messageId: crypto.randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true,
      chatId,
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId: auth.userId,
      workflowId: resolved.workflowId,
      chatId,
      autoExecuteTools: parsed.autoExecuteTools,
      timeout: parsed.timeout,
      interactive: false,
    })

    return NextResponse.json({
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      chatId: result.chatId || chatId, // Return the chatId for conversation continuity
      conversationId: result.conversationId,
      error: result.error,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, error: 'Invalid request', details: error.errors },
        { status: 400 }
      )
    }

    logger.error('Headless copilot request failed', {
      error: error instanceof Error ? error.message : String(error),
    })
    return NextResponse.json({ success: false, error: 'Internal server error' }, { status: 500 })
  }
}
```
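A headless caller might exercise the endpoint like this; the body fields mirror RequestSchema above, while the API-key header name and base URL are assumptions, since `authenticateV1Request()` is defined elsewhere.

```ts
// Illustrative request to POST /api/v1/copilot/chat.
const res = await fetch('https://example.com/api/v1/copilot/chat', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'x-api-key': process.env.SIM_API_KEY ?? '', // assumed header name
  },
  body: JSON.stringify({
    message: 'Add error handling to the Slack notification step',
    workflowName: 'Slack bot', // optional; a workflowId may be passed instead
    autoExecuteTools: true,
    timeout: 300000,
  }),
})
const { success, content, toolCalls, chatId, error } = await res.json()
// Reuse chatId on the next request for conversation continuity.
```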
```diff
@@ -211,7 +211,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
         if (block.type === 'text') {
           const isLastTextBlock =
             index === message.contentBlocks!.length - 1 && block.type === 'text'
-          const parsed = parseSpecialTags(block.content)
+          const parsed = parseSpecialTags(block.content ?? '')
           // Mask credential IDs in the displayed content
           const cleanBlockContent = maskCredentialValue(
             parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
@@ -243,7 +243,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
           return (
             <div key={blockKey} className='w-full'>
               <ThinkingBlock
-                content={maskCredentialValue(block.content)}
+                content={maskCredentialValue(block.content ?? '')}
                 isStreaming={isActivelyStreaming}
                 hasFollowingContent={hasFollowingContent}
                 hasSpecialTags={hasSpecialTags}
@@ -251,7 +251,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
             </div>
           )
         }
-        if (block.type === 'tool_call') {
+        if (block.type === 'tool_call' && block.toolCall) {
           const blockKey = `tool-${block.toolCall.id}`
 
           return (
```
@@ -1,20 +1,15 @@
 'use client'

 import { memo, useEffect, useMemo, useRef, useState } from 'react'
+import { createLogger } from '@sim/logger'
 import clsx from 'clsx'
 import { ChevronUp, LayoutList } from 'lucide-react'
 import Editor from 'react-simple-code-editor'
 import { Button, Code, getCodeEditorProps, highlight, languages } from '@/components/emcn'
-import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
-import { getClientTool } from '@/lib/copilot/tools/client/manager'
-import { getRegisteredTools } from '@/lib/copilot/tools/client/registry'
-import '@/lib/copilot/tools/client/init-tool-configs'
 import {
-  getSubagentLabels as getSubagentLabelsFromConfig,
-  getToolUIConfig,
-  hasInterrupt as hasInterruptFromConfig,
-  isSpecialTool as isSpecialToolFromConfig,
-} from '@/lib/copilot/tools/client/ui-config'
+  ClientToolCallState,
+  TOOL_DISPLAY_REGISTRY,
+} from '@/lib/copilot/tools/client/tool-display-registry'
 import { formatDuration } from '@/lib/core/utils/formatting'
 import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
 import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
@@ -25,7 +20,6 @@ import { getDisplayValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/co
 import { getBlock } from '@/blocks/registry'
 import type { CopilotToolCall } from '@/stores/panel'
 import { useCopilotStore } from '@/stores/panel'
-import { CLASS_TOOL_METADATA } from '@/stores/panel/copilot/store'
 import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
 import { useWorkflowStore } from '@/stores/workflows/workflow/store'

@@ -710,8 +704,8 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
 * @returns The completion label from UI config, defaults to 'Thought'
 */
 function getSubagentCompletionLabel(toolName: string): string {
-  const labels = getSubagentLabelsFromConfig(toolName, false)
-  return labels?.completed ?? 'Thought'
+  const labels = TOOL_DISPLAY_REGISTRY[toolName]?.uiConfig?.subagentLabels
+  return labels?.completed || 'Thought'
 }

 /**
@@ -943,7 +937,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
 * Determines if a tool call should display with special gradient styling.
 */
 function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
-  return isSpecialToolFromConfig(toolCall.name)
+  return TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.isSpecial === true
 }

 /**
@@ -1223,28 +1217,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({

 /** Checks if a tool is server-side executed (not a client tool) */
 function isIntegrationTool(toolName: string): boolean {
-  return !CLASS_TOOL_METADATA[toolName]
+  return !TOOL_DISPLAY_REGISTRY[toolName]
 }

 function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
-  if (hasInterruptFromConfig(toolCall.name) && toolCall.state === 'pending') {
-    return true
-  }
-
-  const instance = getClientTool(toolCall.id)
-  let hasInterrupt = !!instance?.getInterruptDisplays?.()
-  if (!hasInterrupt) {
-    try {
-      const def = getRegisteredTools()[toolCall.name]
-      if (def) {
-        hasInterrupt =
-          typeof def.hasInterrupt === 'function'
-            ? !!def.hasInterrupt(toolCall.params || {})
-            : !!def.hasInterrupt
-      }
-    } catch {}
-  }
-
+  const hasInterrupt = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt === true
   if (hasInterrupt && toolCall.state === 'pending') {
     return true
   }
@@ -1257,109 +1234,50 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
   return false
 }

+const toolCallLogger = createLogger('CopilotToolCall')
+
+async function sendToolDecision(
+  toolCallId: string,
+  status: 'accepted' | 'rejected' | 'background'
+) {
+  try {
+    await fetch('/api/copilot/confirm', {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ toolCallId, status }),
+    })
+  } catch (error) {
+    toolCallLogger.warn('Failed to send tool decision', {
+      toolCallId,
+      status,
+      error: error instanceof Error ? error.message : String(error),
+    })
+  }
+}
+
 async function handleRun(
   toolCall: CopilotToolCall,
   setToolCallState: any,
   onStateChange?: any,
   editedParams?: any
 ) {
-  const instance = getClientTool(toolCall.id)
-  if (!instance && isIntegrationTool(toolCall.name)) {
-    onStateChange?.('executing')
-    try {
-      await useCopilotStore.getState().executeIntegrationTool(toolCall.id)
-    } catch (e) {
-      setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
-      onStateChange?.('error')
-      try {
-        await fetch('/api/copilot/tools/mark-complete', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            id: toolCall.id,
-            name: toolCall.name,
-            status: 500,
-            message: e instanceof Error ? e.message : 'Tool execution failed',
-            data: { error: e instanceof Error ? e.message : String(e) },
-          }),
-        })
-      } catch {
-        console.error('[handleRun] Failed to notify backend of tool error:', toolCall.id)
-      }
-    }
-    return
-  }
-
-  if (!instance) return
-  try {
-    const mergedParams =
-      editedParams ||
-      (toolCall as any).params ||
-      (toolCall as any).parameters ||
-      (toolCall as any).input ||
-      {}
-    await instance.handleAccept?.(mergedParams)
-    onStateChange?.('executing')
-  } catch (e) {
-    setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
-  }
+  setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
+  onStateChange?.('executing')
+  await sendToolDecision(toolCall.id, 'accepted')
 }

 async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
-  const instance = getClientTool(toolCall.id)
-
-  if (!instance && isIntegrationTool(toolCall.name)) {
-    setToolCallState(toolCall, 'rejected')
-    onStateChange?.('rejected')
-
-    let notified = false
-    for (let attempt = 0; attempt < 3 && !notified; attempt++) {
-      try {
-        const res = await fetch('/api/copilot/tools/mark-complete', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            id: toolCall.id,
-            name: toolCall.name,
-            status: 400,
-            message: 'Tool execution skipped by user',
-            data: { skipped: true, reason: 'user_skipped' },
-          }),
-        })
-        if (res.ok) {
-          notified = true
-        }
-      } catch (e) {
-        if (attempt < 2) {
-          await new Promise((resolve) => setTimeout(resolve, 500))
-        }
-      }
-    }
-
-    if (!notified) {
-      console.error('[handleSkip] Failed to notify backend after 3 attempts:', toolCall.id)
-    }
-    return
-  }
-
-  if (instance) {
-    try {
-      await instance.handleReject?.()
-    } catch {}
-  }
   setToolCallState(toolCall, 'rejected')
   onStateChange?.('rejected')
+  await sendToolDecision(toolCall.id, 'rejected')
 }

 function getDisplayName(toolCall: CopilotToolCall): string {
   const fromStore = (toolCall as any).display?.text
   if (fromStore) return fromStore
-  try {
-    const def = getRegisteredTools()[toolCall.name] as any
-    const byState = def?.metadata?.displayNames?.[toolCall.state]
-    if (byState?.text) return byState.text
-  } catch {}
+  const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
+  const byState = registryEntry?.displayNames?.[toolCall.state as ClientToolCallState]
+  if (byState?.text) return byState.text

   const stateVerb = getStateVerb(toolCall.state)
   const formattedName = formatToolName(toolCall.name)
@@ -1509,7 +1427,7 @@ export function ToolCall({
   // Check if this integration tool is auto-allowed
   // Subscribe to autoAllowedTools so we re-render when it changes
   const autoAllowedTools = useCopilotStore((s) => s.autoAllowedTools)
-  const { removeAutoAllowedTool } = useCopilotStore()
+  const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
   const isAutoAllowed = isIntegrationTool(toolCall.name) && autoAllowedTools.includes(toolCall.name)

   // Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
@@ -1537,23 +1455,7 @@ export function ToolCall({
     return null

   // Special rendering for subagent tools - show as thinking text with tool calls at top level
-  const SUBAGENT_TOOLS = [
-    'plan',
-    'edit',
-    'debug',
-    'test',
-    'deploy',
-    'evaluate',
-    'auth',
-    'research',
-    'knowledge',
-    'custom_tool',
-    'tour',
-    'info',
-    'workflow',
-    'superagent',
-  ]
-  const isSubagentTool = SUBAGENT_TOOLS.includes(toolCall.name)
+  const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true

   // For ALL subagent tools, don't show anything until we have blocks with content
   if (isSubagentTool) {
@@ -1593,17 +1495,18 @@ export function ToolCall({
     stateStr === 'aborted'

   // Allow rendering if:
-  // 1. Tool is in CLASS_TOOL_METADATA (client tools), OR
+  // 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
   // 2. We're in build mode (integration tools are executed server-side), OR
   // 3. Tool call is already completed (historical - should always render)
-  const isClientTool = !!CLASS_TOOL_METADATA[toolCall.name]
+  const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
   const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool

   if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
     return null
   }
+  const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
   // Check if tool has params table config (meaning it's expandable)
-  const hasParamsTable = !!getToolUIConfig(toolCall.name)?.paramsTable
+  const hasParamsTable = !!toolUIConfig?.paramsTable
   const isRunWorkflow = toolCall.name === 'run_workflow'
   const isExpandableTool =
     hasParamsTable ||
@@ -1613,7 +1516,6 @@ export function ToolCall({
   const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)

   // Check UI config for secondary action - only show for current message tool calls
-  const toolUIConfig = getToolUIConfig(toolCall.name)
   const secondaryAction = toolUIConfig?.secondaryAction
   const showSecondaryAction = secondaryAction?.showInStates.includes(
     toolCall.state as ClientToolCallState
@@ -2211,16 +2113,9 @@ export function ToolCall({
           <div className='mt-[10px]'>
             <Button
               onClick={async () => {
-                try {
-                  const instance = getClientTool(toolCall.id)
-                  instance?.setState?.((ClientToolCallState as any).background)
-                  await instance?.markToolComplete?.(
-                    200,
-                    'The user has chosen to move the workflow execution to the background. Check back with them later to know when the workflow execution is complete'
-                  )
-                  forceUpdate({})
-                  onStateChange?.('background')
-                } catch {}
+                setToolCallState(toolCall, ClientToolCallState.background)
+                onStateChange?.('background')
+                await sendToolDecision(toolCall.id, 'background')
               }}
               variant='tertiary'
               title='Move to Background'
@@ -2232,21 +2127,9 @@ export function ToolCall({
           <div className='mt-[10px]'>
             <Button
               onClick={async () => {
-                try {
-                  const instance = getClientTool(toolCall.id)
-                  const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
-                  instance?.setState?.((ClientToolCallState as any).background, {
-                    result: { _elapsedSeconds: elapsedSeconds },
-                  })
-                  const { updateToolCallParams } = useCopilotStore.getState()
-                  updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
-                  await instance?.markToolComplete?.(
-                    200,
-                    `User woke you up after ${Math.round(elapsedSeconds)} seconds`
-                  )
-                  forceUpdate({})
-                  onStateChange?.('background')
-                } catch {}
+                setToolCallState(toolCall, ClientToolCallState.background)
+                onStateChange?.('background')
+                await sendToolDecision(toolCall.id, 'background')
              }}
               variant='tertiary'
               title='Wake'
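Note: the refactor above routes Run/Skip decisions through the new /api/copilot/confirm endpoint rather than per-tool client instances. A minimal sketch of how the two handlers might be wired from buttons, assuming the setToolCallState accessor used in the diff (the store wiring here is illustrative, not the component's actual code):

  // Hypothetical wiring: Run accepts the pending tool call, Skip rejects it.
  async function onRunClick(toolCall: CopilotToolCall) {
    await handleRun(toolCall, useCopilotStore.getState().setToolCallState)
  }

  async function onSkipClick(toolCall: CopilotToolCall) {
    await handleSkip(toolCall, useCopilotStore.getState().setToolCallState)
  }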
@@ -107,13 +107,13 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     currentChat,
     selectChat,
     deleteChat,
-    areChatsFresh,
     workflowId: copilotWorkflowId,
     setPlanTodos,
     closePlanTodos,
     clearPlanArtifact,
     savePlanArtifact,
     loadAutoAllowedTools,
+    resumeActiveStream,
   } = useCopilotStore()

   // Initialize copilot
@@ -126,6 +126,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
     loadAutoAllowedTools,
     currentChat,
     isSendingMessage,
+    resumeActiveStream,
   })

   // Handle scroll management (80px stickiness for copilot)
@@ -140,7 +141,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
       activeWorkflowId,
       copilotWorkflowId,
       loadChats,
-      areChatsFresh,
       isSendingMessage,
     }
   )
@@ -421,8 +421,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
         </div>
       </div>

-      {/* Show loading state until fully initialized */}
-      {!isInitialized ? (
+      {/* Show loading state until fully initialized, but skip if actively streaming (resume case) */}
+      {!isInitialized && !isSendingMessage ? (
         <div className='flex h-full w-full items-center justify-center'>
           <div className='flex flex-col items-center gap-3'>
             <p className='text-muted-foreground text-sm'>Loading copilot</p>
@@ -10,7 +10,6 @@ interface UseChatHistoryProps {
   activeWorkflowId: string | null
   copilotWorkflowId: string | null
   loadChats: (forceRefresh: boolean) => Promise<void>
-  areChatsFresh: (workflowId: string) => boolean
   isSendingMessage: boolean
 }

@@ -21,8 +20,7 @@ interface UseChatHistoryProps {
 * @returns Chat history utilities
 */
 export function useChatHistory(props: UseChatHistoryProps) {
-  const { chats, activeWorkflowId, copilotWorkflowId, loadChats, areChatsFresh, isSendingMessage } =
-    props
+  const { chats, activeWorkflowId, copilotWorkflowId, loadChats, isSendingMessage } = props

   /** Groups chats by time period (Today, Yesterday, This Week, etc.) */
   const groupedChats = useMemo(() => {
@@ -80,7 +78,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
   /** Handles history dropdown opening and loads chats if needed (non-blocking) */
   const handleHistoryDropdownOpen = useCallback(
     (open: boolean) => {
-      if (open && activeWorkflowId && !isSendingMessage && !areChatsFresh(activeWorkflowId)) {
+      if (open && activeWorkflowId && !isSendingMessage) {
        loadChats(false).catch((error) => {
          logger.error('Failed to load chat history:', error)
        })
@@ -90,7 +88,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
        logger.info('Chat history opened during stream - showing cached data only')
      }
     },
-    [activeWorkflowId, areChatsFresh, isSendingMessage, loadChats]
+    [activeWorkflowId, isSendingMessage, loadChats]
   )

   return {
@@ -14,6 +14,7 @@ interface UseCopilotInitializationProps {
   loadAutoAllowedTools: () => Promise<void>
   currentChat: any
   isSendingMessage: boolean
+  resumeActiveStream: () => Promise<boolean>
 }

 /**
@@ -32,11 +33,13 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     loadAutoAllowedTools,
     currentChat,
     isSendingMessage,
+    resumeActiveStream,
   } = props

   const [isInitialized, setIsInitialized] = useState(false)
   const lastWorkflowIdRef = useRef<string | null>(null)
   const hasMountedRef = useRef(false)
+  const hasResumedRef = useRef(false)

   /** Initialize on mount - loads chats if needed. Never loads during streaming */
   useEffect(() => {
@@ -105,6 +108,16 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
     isSendingMessage,
   ])

+  /** Try to resume active stream on mount - runs early, before waiting for chats */
+  useEffect(() => {
+    if (hasResumedRef.current || isSendingMessage) return
+    hasResumedRef.current = true
+    // Resume immediately on mount - don't wait for isInitialized
+    resumeActiveStream().catch((err) => {
+      logger.warn('[Copilot] Failed to resume active stream', err)
+    })
+  }, [isSendingMessage, resumeActiveStream])
+
   /** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
   const hasLoadedAutoAllowedToolsRef = useRef(false)
   useEffect(() => {
@@ -18,7 +18,7 @@ import 'reactflow/dist/style.css'
 import { createLogger } from '@sim/logger'
 import { useShallow } from 'zustand/react/shallow'
 import { useSession } from '@/lib/auth/auth-client'
-import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/other/oauth-request-access'
+import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/base-tool'
 import type { OAuthProvider } from '@/lib/oauth'
 import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
 import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
@@ -5,10 +5,43 @@ import { CheckCircle, ChevronDown, ChevronRight, Loader2, Settings, XCircle } fr
 import { Badge } from '@/components/emcn'
 import { Button } from '@/components/ui/button'
 import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
-import type { ToolCallGroup, ToolCallState } from '@/lib/copilot/types'
 import { cn } from '@/lib/core/utils/cn'
 import { formatDuration } from '@/lib/core/utils/formatting'

+interface ToolCallState {
+  id: string
+  name: string
+  displayName?: string
+  parameters?: Record<string, unknown>
+  state:
+    | 'detecting'
+    | 'pending'
+    | 'executing'
+    | 'completed'
+    | 'error'
+    | 'rejected'
+    | 'applied'
+    | 'ready_for_review'
+    | 'aborted'
+    | 'skipped'
+    | 'background'
+  startTime?: number
+  endTime?: number
+  duration?: number
+  result?: unknown
+  error?: string
+  progress?: string
+}
+
+interface ToolCallGroup {
+  id: string
+  toolCalls: ToolCallState[]
+  status: 'pending' | 'in_progress' | 'completed' | 'error'
+  startTime?: number
+  endTime?: number
+  summary?: string
+}
+
 interface ToolCallProps {
   toolCall: ToolCallState
   isCompact?: boolean
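Note: the interfaces added above replace the import from '@/lib/copilot/types' with local definitions. For reference, a value satisfying the new ToolCallState shape might look like this (field values are illustrative only):

  const exampleToolCall: ToolCallState = {
    id: 'tc_123',                        // illustrative id
    name: 'run_workflow',
    state: 'executing',                  // one of the literal states in the union
    startTime: Date.now(),
    parameters: { workflowId: 'wf_abc' },
  }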
@@ -1,5 +1,11 @@
 import { useCallback } from 'react'
 import { createLogger } from '@sim/logger'
+
+declare global {
+  interface Window {
+    __skipDiffRecording?: boolean
+  }
+}
 import type { Edge } from 'reactflow'
 import { useSession } from '@/lib/auth/auth-client'
 import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
@@ -908,7 +914,7 @@ export function useUndoRedo() {

       // Set flag to skip recording during this operation

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Restore baseline state and broadcast to everyone
         if (baselineSnapshot && activeWorkflowId) {
@@ -945,7 +951,7 @@ export function useUndoRedo() {
         logger.info('Clearing diff UI state')
         useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Undid apply-diff operation successfully')
@@ -965,7 +971,7 @@ export function useUndoRedo() {

       // Set flag to skip recording during this operation

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Apply the before-accept state (with markers for this user)
         useWorkflowStore.getState().replaceWorkflowState(beforeAccept)
@@ -1004,7 +1010,7 @@ export function useUndoRedo() {
           diffAnalysis: diffAnalysis,
         })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Undid accept-diff operation - restored diff view')
@@ -1018,7 +1024,7 @@ export function useUndoRedo() {
       const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
       const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Apply the before-reject state (with markers for this user)
         useWorkflowStore.getState().replaceWorkflowState(beforeReject)
@@ -1055,7 +1061,7 @@ export function useUndoRedo() {
           diffAnalysis: diffAnalysis,
         })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Undid reject-diff operation - restored diff view')
@@ -1526,7 +1532,7 @@ export function useUndoRedo() {

       // Set flag to skip recording during this operation

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline)
         const diffStore = useWorkflowDiffStore.getState()
@@ -1567,7 +1573,7 @@ export function useUndoRedo() {
           diffAnalysis: diffAnalysis,
         })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Redid apply-diff operation')
@@ -1583,7 +1589,7 @@ export function useUndoRedo() {

       // Set flag to skip recording during this operation

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Clear diff state FIRST to prevent flash of colors (local UI only)
         // Use setState directly to ensure synchronous clearing
@@ -1621,7 +1627,7 @@ export function useUndoRedo() {
           operationId: opId,
         })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Redid accept-diff operation - cleared diff view')
@@ -1635,7 +1641,7 @@ export function useUndoRedo() {
       const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
       const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')

-      ;(window as any).__skipDiffRecording = true
+      ;window.__skipDiffRecording = true
       try {
         // Clear diff state FIRST to prevent flash of colors (local UI only)
         // Use setState directly to ensure synchronous clearing
@@ -1673,7 +1679,7 @@ export function useUndoRedo() {
           operationId: opId,
         })
       } finally {
-        ;(window as any).__skipDiffRecording = false
+        ;window.__skipDiffRecording = false
       }

       logger.info('Redid reject-diff operation - cleared diff view')
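Note: with the global Window declaration added earlier in this file, the recording guard no longer needs `as any` casts. The pattern repeated through these hunks is the usual set/try/finally sequence; a condensed sketch with a placeholder body:

  // Suppress diff recording while replacing workflow state, then always restore the flag.
  window.__skipDiffRecording = true
  try {
    // ...apply or restore workflow state here...
  } finally {
    window.__skipDiffRecording = false
  }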
@@ -1,4 +1,5 @@
 import { createLogger } from '@sim/logger'
+import { COPILOT_CHAT_API_PATH, COPILOT_CHAT_STREAM_API_PATH } from '@/lib/copilot/constants'
 import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models'

 const logger = createLogger('CopilotAPI')
@@ -82,6 +83,7 @@ export interface SendMessageRequest {
   executionId?: string
   }>
   commands?: string[]
+  resumeFromEventId?: number
 }

 /**
@@ -120,7 +122,7 @@ export async function sendStreamingMessage(
   request: SendMessageRequest
 ): Promise<StreamingResponse> {
   try {
-    const { abortSignal, ...requestBody } = request
+    const { abortSignal, resumeFromEventId, ...requestBody } = request
     try {
       const preview = Array.isArray((requestBody as any).contexts)
         ? (requestBody as any).contexts.map((c: any) => ({
@@ -136,9 +138,54 @@ export async function sendStreamingMessage(
           ? (requestBody as any).contexts.length
           : 0,
         contextsPreview: preview,
+        resumeFromEventId,
       })
-    } catch {}
-    const response = await fetch('/api/copilot/chat', {
+    } catch (error) {
+      logger.warn('Failed to log streaming message context preview', { error: error instanceof Error ? error.message : String(error) })
+    }
+
+    const streamId = request.userMessageId
+    if (typeof resumeFromEventId === 'number') {
+      if (!streamId) {
+        return {
+          success: false,
+          error: 'streamId is required to resume a stream',
+          status: 400,
+        }
+      }
+      const url = `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent(
+        streamId
+      )}&from=${encodeURIComponent(String(resumeFromEventId))}`
+      const response = await fetch(url, {
+        method: 'GET',
+        signal: abortSignal,
+        credentials: 'include',
+      })
+
+      if (!response.ok) {
+        const errorMessage = await handleApiError(response, 'Failed to resume streaming message')
+        return {
+          success: false,
+          error: errorMessage,
+          status: response.status,
+        }
+      }
+
+      if (!response.body) {
+        return {
+          success: false,
+          error: 'No response body received',
+          status: 500,
+        }
+      }
+
+      return {
+        success: true,
+        stream: response.body,
+      }
+    }
+
+    const response = await fetch(COPILOT_CHAT_API_PATH, {
       method: 'POST',
       headers: { 'Content-Type': 'application/json' },
       body: JSON.stringify({ ...requestBody, stream: true }),
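Note: with resumeFromEventId wired through sendStreamingMessage, a caller can reattach to an in-flight stream instead of starting a new turn. A hedged sketch of that call, assuming the stream id is the original user message id as in the diff; identifiers like activeStream and field names other than userMessageId and resumeFromEventId are placeholders inferred from context:

  const result = await sendStreamingMessage({
    message: '',                           // unused when resuming; illustrative
    userMessageId: activeStream.streamId,  // the id the original send used
    resumeFromEventId: activeStream.lastEventId,
    workflowId,
  } as SendMessageRequest)

  if (result.success && result.stream) {
    // Consume the SSE body from the resumed position.
    const reader = result.stream.getReader()
  }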
apps/sim/lib/copilot/chat-context.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
+import { createLogger } from '@sim/logger'
+import { CopilotFiles } from '@/lib/uploads'
+import { createFileContent } from '@/lib/uploads/utils/file-utils'
+
+const logger = createLogger('CopilotChatContext')
+
+/**
+ * Build conversation history from stored chat messages.
+ */
+export function buildConversationHistory(
+  messages: unknown[],
+  conversationId?: string
+): { history: unknown[]; conversationId?: string } {
+  const history = Array.isArray(messages) ? messages : []
+  return {
+    history,
+    ...(conversationId ? { conversationId } : {}),
+  }
+}
+
+export interface FileAttachmentInput {
+  id: string
+  key: string
+  name?: string
+  filename?: string
+  mimeType?: string
+  media_type?: string
+  size: number
+}
+
+export interface FileContent {
+  type: string
+  [key: string]: unknown
+}
+
+/**
+ * Process file attachments into content for the payload.
+ */
+export async function processFileAttachments(
+  fileAttachments: FileAttachmentInput[],
+  userId: string
+): Promise<FileContent[]> {
+  if (!Array.isArray(fileAttachments) || fileAttachments.length === 0) return []
+
+  const processedFileContents: FileContent[] = []
+  const requestId = `copilot-${userId}-${Date.now()}`
+  const processedAttachments = await CopilotFiles.processCopilotAttachments(fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0], requestId)
+
+  for (const { buffer, attachment } of processedAttachments) {
+    const fileContent = createFileContent(buffer, attachment.media_type)
+    if (fileContent) {
+      processedFileContents.push(fileContent as FileContent)
+    }
+  }
+
+  logger.debug('Processed file attachments for payload', {
+    userId,
+    inputCount: fileAttachments.length,
+    outputCount: processedFileContents.length,
+  })
+
+  return processedFileContents
+}
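Note: a short usage sketch for the helpers above, assuming a userId, a loaded chat row, and attachment records shaped like FileAttachmentInput; all concrete values are illustrative:

  const { history } = buildConversationHistory(chat?.messages ?? [], conversationId)

  const fileContents = await processFileAttachments(
    [{ id: 'att_1', key: 'uploads/report.pdf', media_type: 'application/pdf', size: 1024 }],
    userId
  )
  // fileContents can then be appended to the outgoing message content array.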
apps/sim/lib/copilot/chat-lifecycle.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
+import { db } from '@sim/db'
+import { copilotChats } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { and, eq } from 'drizzle-orm'
+
+const logger = createLogger('CopilotChatLifecycle')
+
+export interface ChatLoadResult {
+  chatId: string
+  chat: typeof copilotChats.$inferSelect | null
+  conversationHistory: unknown[]
+  isNew: boolean
+}
+
+/**
+ * Resolve or create a copilot chat session.
+ * If chatId is provided, loads the existing chat. Otherwise creates a new one.
+ */
+export async function resolveOrCreateChat(params: {
+  chatId?: string
+  userId: string
+  workflowId: string
+  model: string
+}): Promise<ChatLoadResult> {
+  const { chatId, userId, workflowId, model } = params
+
+  if (chatId) {
+    const [chat] = await db
+      .select()
+      .from(copilotChats)
+      .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
+      .limit(1)
+
+    return {
+      chatId,
+      chat: chat ?? null,
+      conversationHistory: chat && Array.isArray(chat.messages) ? chat.messages : [],
+      isNew: false,
+    }
+  }
+
+  const [newChat] = await db
+    .insert(copilotChats)
+    .values({
+      userId,
+      workflowId,
+      title: null,
+      model,
+      messages: [],
+    })
+    .returning()
+
+  if (!newChat) {
+    logger.warn('Failed to create new copilot chat row', { userId, workflowId })
+    return {
+      chatId: '',
+      chat: null,
+      conversationHistory: [],
+      isNew: true,
+    }
+  }
+
+  return {
+    chatId: newChat.id,
+    chat: newChat,
+    conversationHistory: [],
+    isNew: true,
+  }
+}
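Note: a usage sketch for resolveOrCreateChat as a chat route handler might call it; identifiers such as session, requestedChatId, and selectedModel are placeholders:

  const { chatId, chat, conversationHistory, isNew } = await resolveOrCreateChat({
    chatId: requestedChatId,        // undefined on the first message of a new chat
    userId: session.user.id,
    workflowId,
    model: selectedModel,
  })

  if (isNew) {
    // A fresh chat row was inserted; conversationHistory starts empty.
  }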
apps/sim/lib/copilot/chat-payload.ts (new file, 223 lines)
@@ -0,0 +1,223 @@
+import { createLogger } from '@sim/logger'
+import { env } from '@/lib/core/config/env'
+import { getCopilotModel } from '@/lib/copilot/config'
+import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
+import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
+import type { CopilotProviderConfig } from '@/lib/copilot/types'
+import { tools } from '@/tools/registry'
+import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
+import { type FileContent, processFileAttachments } from '@/lib/copilot/chat-context'
+
+const logger = createLogger('CopilotChatPayload')
+
+export interface BuildPayloadParams {
+  message: string
+  workflowId: string
+  userId: string
+  userMessageId: string
+  mode: string
+  model: string
+  conversationHistory?: unknown[]
+  contexts?: Array<{ type: string; content: string }>
+  fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
+  commands?: string[]
+  chatId?: string
+  implicitFeedback?: string
+}
+
+interface ToolSchema {
+  name: string
+  description: string
+  input_schema: Record<string, unknown>
+  defer_loading?: boolean
+  executeLocally?: boolean
+  oauth?: { required: boolean; provider: string }
+}
+
+interface CredentialsPayload {
+  oauth: Record<string, { accessToken: string; accountId: string; name: string; expiresAt?: string }>
+  apiKeys: string[]
+  metadata?: {
+    connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }>
+    configuredApiKeys: string[]
+  }
+}
+
+type MessageContent = string | Array<{ type: string; text?: string; [key: string]: unknown }>
+
+interface ConversationMessage {
+  role: string
+  content: MessageContent
+}
+
+function buildProviderConfig(selectedModel: string): CopilotProviderConfig | undefined {
+  const defaults = getCopilotModel('chat')
+  const envModel = env.COPILOT_MODEL || defaults.model
+  const providerEnv = env.COPILOT_PROVIDER
+
+  if (!providerEnv) return undefined
+
+  if (providerEnv === 'azure-openai') {
+    return {
+      provider: 'azure-openai',
+      model: envModel,
+      apiKey: env.AZURE_OPENAI_API_KEY,
+      apiVersion: 'preview',
+      endpoint: env.AZURE_OPENAI_ENDPOINT,
+    }
+  }
+
+  if (providerEnv === 'vertex') {
+    return {
+      provider: 'vertex',
+      model: envModel,
+      apiKey: env.COPILOT_API_KEY,
+      vertexProject: env.VERTEX_PROJECT,
+      vertexLocation: env.VERTEX_LOCATION,
+    }
+  }
+
+  return {
+    provider: providerEnv as Exclude<string, 'azure-openai' | 'vertex'>,
+    model: selectedModel,
+    apiKey: env.COPILOT_API_KEY,
+  } as CopilotProviderConfig
+}
+
+/**
+ * Build the request payload for the copilot backend.
+ */
+export async function buildCopilotRequestPayload(
+  params: BuildPayloadParams,
+  options: {
+    providerConfig?: CopilotProviderConfig
+    selectedModel: string
+  }
+): Promise<Record<string, unknown>> {
+  const {
+    message, workflowId, userId, userMessageId, mode,
+    conversationHistory = [], contexts, fileAttachments,
+    commands, chatId, implicitFeedback,
+  } = params
+
+  const selectedModel = options.selectedModel
+  const providerConfig = options.providerConfig ?? buildProviderConfig(selectedModel)
+
+  const effectiveMode = mode === 'agent' ? 'build' : mode
+  const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
+
+  const processedFileContents = await processFileAttachments(fileAttachments ?? [], userId)
+
+  const messages: ConversationMessage[] = []
+  for (const msg of conversationHistory as Array<Record<string, unknown>>) {
+    const msgAttachments = msg.fileAttachments as Array<Record<string, unknown>> | undefined
+    if (Array.isArray(msgAttachments) && msgAttachments.length > 0) {
+      const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [
+        { type: 'text', text: msg.content as string },
+      ]
+      const processedHistoricalAttachments = await processFileAttachments(msgAttachments as BuildPayloadParams['fileAttachments'] ?? [], userId)
+      for (const fileContent of processedHistoricalAttachments) {
+        content.push(fileContent)
+      }
+      messages.push({ role: msg.role as string, content })
+    } else {
+      messages.push({ role: msg.role as string, content: msg.content as string })
+    }
+  }
+
+  if (implicitFeedback) {
+    messages.push({ role: 'system', content: implicitFeedback })
+  }
+
+  if (processedFileContents.length > 0) {
+    const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [
+      { type: 'text', text: message },
+    ]
+    for (const fileContent of processedFileContents) {
+      content.push(fileContent)
+    }
+    messages.push({ role: 'user', content })
+  } else {
+    messages.push({ role: 'user', content: message })
+  }
+
+  let integrationTools: ToolSchema[] = []
+  let credentials: CredentialsPayload | null = null
+
+  if (effectiveMode === 'build') {
+    // function_execute sandbox tool is now defined in Go — no need to send it
+
+    try {
+      const rawCredentials = await getCredentialsServerTool.execute({ workflowId }, { userId })
+
+      const oauthMap: CredentialsPayload['oauth'] = {}
+      const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = []
+      for (const cred of rawCredentials?.oauth?.connected?.credentials ?? []) {
+        if (cred.accessToken) {
+          oauthMap[cred.provider] = {
+            accessToken: cred.accessToken,
+            accountId: cred.id,
+            name: cred.name,
+          }
+          connectedOAuth.push({ provider: cred.provider, name: cred.name })
+        }
+      }
+
+      credentials = {
+        oauth: oauthMap,
+        apiKeys: rawCredentials?.environment?.variableNames ?? [],
+        metadata: {
+          connectedOAuth,
+          configuredApiKeys: rawCredentials?.environment?.variableNames ?? [],
+        },
+      }
+    } catch (error) {
+      logger.warn('Failed to fetch credentials for build payload', {
+        error: error instanceof Error ? error.message : String(error),
+      })
+    }
+
+    try {
+      const { createUserToolSchema } = await import('@/tools/params')
+      const latestTools = getLatestVersionTools(tools)
+
+      integrationTools = Object.entries(latestTools).map(([toolId, toolConfig]) => {
+        const userSchema = createUserToolSchema(toolConfig)
+        const strippedName = stripVersionSuffix(toolId)
+        return {
+          name: strippedName,
+          description: toolConfig.description || toolConfig.name || strippedName,
+          input_schema: userSchema as unknown as Record<string, unknown>,
+          defer_loading: true,
+          ...(toolConfig.oauth?.required && {
+            oauth: {
+              required: true,
+              provider: toolConfig.oauth.provider,
+            },
+          }),
+        }
+      })
+    } catch (error) {
+      logger.warn('Failed to build tool schemas for payload', {
+        error: error instanceof Error ? error.message : String(error),
+      })
+    }
+  }
+
+  return {
+    message,
+    workflowId,
+    userId,
+    model: selectedModel,
+    mode: transportMode,
+    messageId: userMessageId,
+    version: SIM_AGENT_VERSION,
+    ...(providerConfig ? { provider: providerConfig } : {}),
+    ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
+    ...(chatId ? { chatId } : {}),
+    ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
+    ...(integrationTools.length > 0 ? { integrationTools } : {}),
+    ...(credentials ? { credentials } : {}),
+    ...(commands && commands.length > 0 ? { commands } : {}),
+  }
+}
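Note: a sketch of how the payload builder might be invoked once the chat has been resolved; every argument value below is a placeholder:

  const payload = await buildCopilotRequestPayload(
    {
      message: userMessage,
      workflowId,
      userId,
      userMessageId,
      mode: 'build',
      model: selectedModel,
      conversationHistory,
      chatId,
    },
    { selectedModel }
  )
  // payload is then forwarded to the sim-agent chat endpoint.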
apps/sim/lib/copilot/client-sse/content-blocks.ts (new file, 144 lines)
@@ -0,0 +1,144 @@
+import type {
+  ChatContext,
+  CopilotMessage,
+  MessageFileAttachment,
+} from '@/stores/panel/copilot/types'
+import type { ClientContentBlock, ClientStreamingContext } from './types'
+
+const TEXT_BLOCK_TYPE = 'text'
+const THINKING_BLOCK_TYPE = 'thinking'
+const CONTINUE_OPTIONS_TAG = '<options>{"1":"Continue"}</options>'
+
+export function createUserMessage(
+  content: string,
+  fileAttachments?: MessageFileAttachment[],
+  contexts?: ChatContext[],
+  messageId?: string
+): CopilotMessage {
+  return {
+    id: messageId || crypto.randomUUID(),
+    role: 'user',
+    content,
+    timestamp: new Date().toISOString(),
+    ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
+    ...(contexts && contexts.length > 0 && { contexts }),
+    ...(contexts &&
+      contexts.length > 0 && {
+        contentBlocks: [
+          { type: 'contexts', contexts, timestamp: Date.now() },
+        ],
+      }),
+  }
+}
+
+export function createStreamingMessage(): CopilotMessage {
+  return {
+    id: crypto.randomUUID(),
+    role: 'assistant',
+    content: '',
+    timestamp: new Date().toISOString(),
+  }
+}
+
+export function createErrorMessage(
+  messageId: string,
+  content: string,
+  errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required'
+): CopilotMessage {
+  return {
+    id: messageId,
+    role: 'assistant',
+    content,
+    timestamp: new Date().toISOString(),
+    contentBlocks: [
+      {
+        type: 'text',
+        content,
+        timestamp: Date.now(),
+      },
+    ],
+    errorType,
+  }
+}
+
+export function appendTextBlock(context: ClientStreamingContext, text: string) {
+  if (!text) return
+  context.accumulatedContent += text
+  if (context.currentTextBlock && context.contentBlocks.length > 0) {
+    const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
+    if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
+      lastBlock.content += text
+      return
+    }
+  }
+  const newBlock: ClientContentBlock = { type: 'text', content: text, timestamp: Date.now() }
+  context.currentTextBlock = newBlock
+  context.contentBlocks.push(newBlock)
+}
+
+export function appendContinueOption(content: string): string {
+  if (/<options>/i.test(content)) return content
+  const suffix = content.trim().length > 0 ? '\n\n' : ''
+  return `${content}${suffix}${CONTINUE_OPTIONS_TAG}`
+}
+
+export function appendContinueOptionBlock(blocks: ClientContentBlock[]): ClientContentBlock[] {
+  if (!Array.isArray(blocks)) return blocks
+  const hasOptions = blocks.some(
+    (block) =>
+      block?.type === TEXT_BLOCK_TYPE &&
+      typeof block.content === 'string' &&
+      /<options>/i.test(block.content)
+  )
+  if (hasOptions) return blocks
+  return [
+    ...blocks,
+    {
+      type: TEXT_BLOCK_TYPE,
+      content: CONTINUE_OPTIONS_TAG,
+      timestamp: Date.now(),
+    },
+  ]
+}
+
+export function stripContinueOption(content: string): string {
+  if (!content || !content.includes(CONTINUE_OPTIONS_TAG)) return content
+  const next = content.replace(CONTINUE_OPTIONS_TAG, '')
+  return next.replace(/\n{2,}\s*$/g, '\n').trimEnd()
+}
+
+export function stripContinueOptionFromBlocks(blocks: ClientContentBlock[]): ClientContentBlock[] {
+  if (!Array.isArray(blocks)) return blocks
+  return blocks.flatMap((block) => {
+    if (
+      block?.type === TEXT_BLOCK_TYPE &&
+      typeof block.content === 'string' &&
+      block.content.includes(CONTINUE_OPTIONS_TAG)
+    ) {
+      const nextContent = stripContinueOption(block.content)
+      if (!nextContent.trim()) return []
+      return [{ ...block, content: nextContent }]
+    }
+    return [block]
+  })
+}
+
+export function beginThinkingBlock(context: ClientStreamingContext) {
+  if (!context.currentThinkingBlock) {
+    const newBlock: ClientContentBlock = { type: 'thinking', content: '', timestamp: Date.now(), startTime: Date.now() }
+    context.currentThinkingBlock = newBlock
+    context.contentBlocks.push(newBlock)
+  }
+  context.isInThinkingBlock = true
+  context.currentTextBlock = null
+}
+
+export function finalizeThinkingBlock(context: ClientStreamingContext) {
+  if (context.currentThinkingBlock) {
+    context.currentThinkingBlock.duration =
+      Date.now() - (context.currentThinkingBlock.startTime || Date.now())
+  }
+  context.isInThinkingBlock = false
+  context.currentThinkingBlock = null
+  context.currentTextBlock = null
+}
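Note: a sketch of how the block helpers cooperate while a stream is being parsed, assuming a ClientStreamingContext value (here named ctx) with the fields referenced above (contentBlocks, currentTextBlock, currentThinkingBlock, accumulatedContent):

  // Thinking tokens open a thinking block; plain tokens append to a text block.
  beginThinkingBlock(ctx)
  // ...thinking deltas arrive and are written into ctx.currentThinkingBlock...
  finalizeThinkingBlock(ctx)
  appendTextBlock(ctx, 'Here is the plan ')
  appendTextBlock(ctx, 'for your workflow.')   // merged into the same text block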
apps/sim/lib/copilot/client-sse/handlers.ts (new file, 741 lines; listing truncated below)
@@ -0,0 +1,741 @@
+import { createLogger } from '@sim/logger'
+import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
+import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
+import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
+import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
+import {
+  isBackgroundState,
+  isRejectedState,
+  isReviewState,
+  resolveToolDisplay,
+} from '@/lib/copilot/store-utils'
+import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
+import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
+import type { WorkflowState } from '@/stores/workflows/workflow/types'
+import {
+  appendTextBlock,
+  beginThinkingBlock,
+  finalizeThinkingBlock,
+} from './content-blocks'
+import type { ClientContentBlock, ClientStreamingContext } from './types'
+
+const logger = createLogger('CopilotClientSseHandlers')
+const TEXT_BLOCK_TYPE = 'text'
+const MAX_BATCH_INTERVAL = 50
+const MIN_BATCH_INTERVAL = 16
+const MAX_QUEUE_SIZE = 5
+
+function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
+  if (typeof window === 'undefined') return
+  try {
+    if (!info) {
+      window.sessionStorage.removeItem(STREAM_STORAGE_KEY)
+      return
+    }
+    window.sessionStorage.setItem(STREAM_STORAGE_KEY, JSON.stringify(info))
+  } catch (error) {
+    logger.warn('Failed to write active stream to storage', {
+      error: error instanceof Error ? error.message : String(error),
+    })
+  }
+}
+
+type StoreSet = (
+  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
+) => void
+
+export type SSEHandler = (
+  data: SSEEvent,
+  context: ClientStreamingContext,
+  get: () => CopilotStore,
+  set: StoreSet
+) => Promise<void> | void
+
+const streamingUpdateQueue = new Map<string, ClientStreamingContext>()
+let streamingUpdateRAF: number | null = null
+let lastBatchTime = 0
+
+export function stopStreamingUpdates() {
+  if (streamingUpdateRAF !== null) {
+    cancelAnimationFrame(streamingUpdateRAF)
+    streamingUpdateRAF = null
+  }
+  streamingUpdateQueue.clear()
+}
+
+function createOptimizedContentBlocks(contentBlocks: ClientContentBlock[]): ClientContentBlock[] {
+  const result: ClientContentBlock[] = new Array(contentBlocks.length)
+  for (let i = 0; i < contentBlocks.length; i++) {
+    const block = contentBlocks[i]
+    result[i] = { ...block }
+  }
+  return result
+}
+
+export function flushStreamingUpdates(set: StoreSet) {
+  if (streamingUpdateRAF !== null) {
+    cancelAnimationFrame(streamingUpdateRAF)
+    streamingUpdateRAF = null
+  }
+  if (streamingUpdateQueue.size === 0) return
+
+  const updates = new Map(streamingUpdateQueue)
+  streamingUpdateQueue.clear()
+
+  set((state: CopilotStore) => {
+    if (updates.size === 0) return state
+    return {
+      messages: state.messages.map((msg) => {
+        const update = updates.get(msg.id)
+        if (update) {
+          return {
+            ...msg,
+            content: '',
+            contentBlocks:
+              update.contentBlocks.length > 0 ? createOptimizedContentBlocks(update.contentBlocks) : [],
+          }
+        }
+        return msg
+      }),
+    }
+  })
+}
+
+export function updateStreamingMessage(set: StoreSet, context: ClientStreamingContext) {
+  if (context.suppressStreamingUpdates) return
+  const now = performance.now()
+  streamingUpdateQueue.set(context.messageId, context)
+  const timeSinceLastBatch = now - lastBatchTime
+  const shouldFlushImmediately =
|
||||||
|
streamingUpdateQueue.size >= MAX_QUEUE_SIZE || timeSinceLastBatch > MAX_BATCH_INTERVAL
|
||||||
|
|
||||||
|
if (streamingUpdateRAF === null) {
|
||||||
|
const scheduleUpdate = () => {
|
||||||
|
streamingUpdateRAF = requestAnimationFrame(() => {
|
||||||
|
const updates = new Map(streamingUpdateQueue)
|
||||||
|
streamingUpdateQueue.clear()
|
||||||
|
streamingUpdateRAF = null
|
||||||
|
lastBatchTime = performance.now()
|
||||||
|
set((state: CopilotStore) => {
|
||||||
|
if (updates.size === 0) return state
|
||||||
|
const messages = state.messages
|
||||||
|
const lastMessage = messages[messages.length - 1]
|
||||||
|
const lastMessageUpdate = lastMessage ? updates.get(lastMessage.id) : null
|
||||||
|
if (updates.size === 1 && lastMessageUpdate) {
|
||||||
|
const newMessages = [...messages]
|
||||||
|
newMessages[messages.length - 1] = {
|
||||||
|
...lastMessage,
|
||||||
|
content: '',
|
||||||
|
contentBlocks:
|
||||||
|
lastMessageUpdate.contentBlocks.length > 0
|
||||||
|
? createOptimizedContentBlocks(lastMessageUpdate.contentBlocks)
|
||||||
|
: [],
|
||||||
|
}
|
||||||
|
return { messages: newMessages }
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
messages: messages.map((msg) => {
|
||||||
|
const update = updates.get(msg.id)
|
||||||
|
if (update) {
|
||||||
|
return {
|
||||||
|
...msg,
|
||||||
|
content: '',
|
||||||
|
contentBlocks:
|
||||||
|
update.contentBlocks.length > 0
|
||||||
|
? createOptimizedContentBlocks(update.contentBlocks)
|
||||||
|
: [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (shouldFlushImmediately) scheduleUpdate()
|
||||||
|
else setTimeout(scheduleUpdate, Math.max(0, MIN_BATCH_INTERVAL - timeSinceLastBatch))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function upsertToolCallBlock(context: ClientStreamingContext, toolCall: CopilotToolCall) {
|
||||||
|
let found = false
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) {
|
||||||
|
context.contentBlocks[i] = { ...b, toolCall }
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!found) {
|
||||||
|
context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stripThinkingTags(text: string): string {
|
||||||
|
return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '')
|
||||||
|
}
|
||||||
|
|
||||||
|
function appendThinkingContent(context: ClientStreamingContext, text: string) {
|
||||||
|
if (!text) return
|
||||||
|
const cleanedText = stripThinkingTags(text)
|
||||||
|
if (!cleanedText) return
|
||||||
|
if (context.currentThinkingBlock) {
|
||||||
|
context.currentThinkingBlock.content += cleanedText
|
||||||
|
} else {
|
||||||
|
const newBlock: ClientContentBlock = { type: 'thinking', content: cleanedText, timestamp: Date.now(), startTime: Date.now() }
|
||||||
|
context.currentThinkingBlock = newBlock
|
||||||
|
context.contentBlocks.push(newBlock)
|
||||||
|
}
|
||||||
|
context.isInThinkingBlock = true
|
||||||
|
context.currentTextBlock = null
|
||||||
|
}
|
||||||
|
|
||||||
|
export const sseHandlers: Record<string, SSEHandler> = {
|
||||||
|
chat_id: async (data, context, get, set) => {
|
||||||
|
context.newChatId = data.chatId
|
||||||
|
const { currentChat, activeStream } = get()
|
||||||
|
if (!currentChat && context.newChatId) {
|
||||||
|
await get().handleNewChatCreation(context.newChatId)
|
||||||
|
}
|
||||||
|
if (activeStream && context.newChatId && !activeStream.chatId) {
|
||||||
|
const updatedStream = { ...activeStream, chatId: context.newChatId }
|
||||||
|
set({ activeStream: updatedStream })
|
||||||
|
writeActiveStreamToStorage(updatedStream)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
title_updated: (_data, _context, get, set) => {
|
||||||
|
const title = _data.title
|
||||||
|
if (!title) return
|
||||||
|
const { currentChat, chats } = get()
|
||||||
|
if (currentChat) {
|
||||||
|
set({
|
||||||
|
currentChat: { ...currentChat, title },
|
||||||
|
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_result: (data, context, get, set) => {
|
||||||
|
try {
|
||||||
|
const eventData = asRecord(data?.data)
|
||||||
|
const toolCallId: string | undefined = data?.toolCallId || (eventData.id as string | undefined)
|
||||||
|
const success: boolean | undefined = data?.success
|
||||||
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
|
const resultObj = asRecord(data?.result)
|
||||||
|
const skipped: boolean = resultObj.skipped === true
|
||||||
|
if (!toolCallId) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const current = toolCallsById[toolCallId]
|
||||||
|
if (current) {
|
||||||
|
if (
|
||||||
|
isRejectedState(current.state) ||
|
||||||
|
isReviewState(current.state) ||
|
||||||
|
isBackgroundState(current.state)
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const targetState = success
|
||||||
|
? ClientToolCallState.success
|
||||||
|
: failedDependency || skipped
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
const updatedMap = { ...toolCallsById }
|
||||||
|
updatedMap[toolCallId] = {
|
||||||
|
...current,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||||
|
}
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
|
||||||
|
if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') {
|
||||||
|
try {
|
||||||
|
const result = asRecord(data?.result) || asRecord(eventData.result)
|
||||||
|
const input = asRecord(current.params || current.input)
|
||||||
|
const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined
|
||||||
|
if (todoId) {
|
||||||
|
get().updatePlanTodoStatus(todoId, 'completed')
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to process checkoff_todo tool result', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
toolCallId,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
current.name === 'mark_todo_in_progress'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const result = asRecord(data?.result) || asRecord(eventData.result)
|
||||||
|
const input = asRecord(current.params || current.input)
|
||||||
|
const todoId = (input.id || input.todoId || result.id || result.todoId) as string | undefined
|
||||||
|
if (todoId) {
|
||||||
|
get().updatePlanTodoStatus(todoId, 'executing')
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to process mark_todo_in_progress tool result', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
toolCallId,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (current.name === 'edit_workflow') {
|
||||||
|
try {
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
|
)
|
||||||
|
const workflowState = asRecord(resultPayload?.workflowState)
|
||||||
|
const hasWorkflowState = !!resultPayload?.workflowState
|
||||||
|
logger.info('[SSE] edit_workflow result received', {
|
||||||
|
hasWorkflowState,
|
||||||
|
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
||||||
|
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
||||||
|
})
|
||||||
|
if (hasWorkflowState) {
|
||||||
|
const diffStore = useWorkflowDiffStore.getState()
|
||||||
|
diffStore.setProposedChanges(resultPayload.workflowState as WorkflowState).catch((err) => {
|
||||||
|
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[SSE] edit_workflow result handling failed', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
|
||||||
|
if (
|
||||||
|
isRejectedState(b.toolCall?.state) ||
|
||||||
|
isReviewState(b.toolCall?.state) ||
|
||||||
|
isBackgroundState(b.toolCall?.state)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
const targetState = success
|
||||||
|
? ClientToolCallState.success
|
||||||
|
: failedDependency || skipped
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
context.contentBlocks[i] = {
|
||||||
|
...b,
|
||||||
|
toolCall: {
|
||||||
|
...b.toolCall,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(
|
||||||
|
b.toolCall?.name,
|
||||||
|
targetState,
|
||||||
|
toolCallId,
|
||||||
|
b.toolCall?.params
|
||||||
|
),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to process tool_result SSE event', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_error: (data, context, get, set) => {
|
||||||
|
try {
|
||||||
|
const errorData = asRecord(data?.data)
|
||||||
|
const toolCallId: string | undefined = data?.toolCallId || (errorData.id as string | undefined)
|
||||||
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
|
if (!toolCallId) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const current = toolCallsById[toolCallId]
|
||||||
|
if (current) {
|
||||||
|
if (
|
||||||
|
isRejectedState(current.state) ||
|
||||||
|
isReviewState(current.state) ||
|
||||||
|
isBackgroundState(current.state)
|
||||||
|
) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const targetState = failedDependency
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
const updatedMap = { ...toolCallsById }
|
||||||
|
updatedMap[toolCallId] = {
|
||||||
|
...current,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||||
|
}
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
}
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
|
||||||
|
if (
|
||||||
|
isRejectedState(b.toolCall?.state) ||
|
||||||
|
isReviewState(b.toolCall?.state) ||
|
||||||
|
isBackgroundState(b.toolCall?.state)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
const targetState = failedDependency
|
||||||
|
? ClientToolCallState.rejected
|
||||||
|
: ClientToolCallState.error
|
||||||
|
context.contentBlocks[i] = {
|
||||||
|
...b,
|
||||||
|
toolCall: {
|
||||||
|
...b.toolCall,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(
|
||||||
|
b.toolCall?.name,
|
||||||
|
targetState,
|
||||||
|
toolCallId,
|
||||||
|
b.toolCall?.params
|
||||||
|
),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to process tool_error SSE event', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_generating: (data, context, get, set) => {
|
||||||
|
const { toolCallId, toolName } = data
|
||||||
|
if (!toolCallId || !toolName) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
|
if (!toolCallsById[toolCallId]) {
|
||||||
|
const initialState = ClientToolCallState.pending
|
||||||
|
const tc: CopilotToolCall = {
|
||||||
|
id: toolCallId,
|
||||||
|
name: toolName,
|
||||||
|
state: initialState,
|
||||||
|
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
||||||
|
}
|
||||||
|
const updated = { ...toolCallsById, [toolCallId]: tc }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
logger.info('[toolCallsById] map updated', updated)
|
||||||
|
|
||||||
|
upsertToolCallBlock(context, tc)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
tool_call: (data, context, get, set) => {
|
||||||
|
const toolData = asRecord(data?.data)
|
||||||
|
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
||||||
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
|
if (!id) return
|
||||||
|
const args = toolData.arguments as Record<string, unknown> | undefined
|
||||||
|
const isPartial = toolData.partial === true
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
|
const existing = toolCallsById[id]
|
||||||
|
const next: CopilotToolCall = existing
|
||||||
|
? {
|
||||||
|
...existing,
|
||||||
|
state: ClientToolCallState.pending,
|
||||||
|
...(args ? { params: args } : {}),
|
||||||
|
display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
id,
|
||||||
|
name: name || 'unknown_tool',
|
||||||
|
state: ClientToolCallState.pending,
|
||||||
|
...(args ? { params: args } : {}),
|
||||||
|
display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
|
||||||
|
}
|
||||||
|
const updated = { ...toolCallsById, [id]: next }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
logger.info('[toolCallsById] → pending', { id, name, params: args })
|
||||||
|
|
||||||
|
upsertToolCallBlock(context, next)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
|
||||||
|
if (isPartial) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
},
|
||||||
|
reasoning: (data, context, _get, set) => {
|
||||||
|
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
||||||
|
if (phase === 'start') {
|
||||||
|
beginThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (phase === 'end') {
|
||||||
|
finalizeThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const chunk: string = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||||
|
if (!chunk) return
|
||||||
|
appendThinkingContent(context, chunk)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
},
|
||||||
|
content: (data, context, get, set) => {
|
||||||
|
if (!data.data) return
|
||||||
|
context.pendingContent += data.data
|
||||||
|
|
||||||
|
let contentToProcess = context.pendingContent
|
||||||
|
let hasProcessedContent = false
|
||||||
|
|
||||||
|
const thinkingStartRegex = /<thinking>/
|
||||||
|
const thinkingEndRegex = /<\/thinking>/
|
||||||
|
const designWorkflowStartRegex = /<design_workflow>/
|
||||||
|
const designWorkflowEndRegex = /<\/design_workflow>/
|
||||||
|
|
||||||
|
const splitTrailingPartialTag = (
|
||||||
|
text: string,
|
||||||
|
tags: string[]
|
||||||
|
): { text: string; remaining: string } => {
|
||||||
|
const partialIndex = text.lastIndexOf('<')
|
||||||
|
if (partialIndex < 0) {
|
||||||
|
return { text, remaining: '' }
|
||||||
|
}
|
||||||
|
const possibleTag = text.substring(partialIndex)
|
||||||
|
const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
|
||||||
|
if (!matchesTagStart) {
|
||||||
|
return { text, remaining: '' }
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
text: text.substring(0, partialIndex),
|
||||||
|
remaining: possibleTag,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
while (contentToProcess.length > 0) {
|
||||||
|
if (context.isInDesignWorkflowBlock) {
|
||||||
|
const endMatch = designWorkflowEndRegex.exec(contentToProcess)
|
||||||
|
if (endMatch) {
|
||||||
|
const designContent = contentToProcess.substring(0, endMatch.index)
|
||||||
|
context.designWorkflowContent += designContent
|
||||||
|
context.isInDesignWorkflowBlock = false
|
||||||
|
|
||||||
|
logger.info('[design_workflow] Tag complete, setting plan content', {
|
||||||
|
contentLength: context.designWorkflowContent.length,
|
||||||
|
})
|
||||||
|
set({ streamingPlanContent: context.designWorkflowContent })
|
||||||
|
|
||||||
|
contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
|
||||||
|
hasProcessedContent = true
|
||||||
|
} else {
|
||||||
|
const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
|
||||||
|
'</design_workflow>',
|
||||||
|
])
|
||||||
|
context.designWorkflowContent += text
|
||||||
|
|
||||||
|
set({ streamingPlanContent: context.designWorkflowContent })
|
||||||
|
|
||||||
|
contentToProcess = remaining
|
||||||
|
hasProcessedContent = true
|
||||||
|
if (remaining) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
|
||||||
|
const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
|
||||||
|
if (designStartMatch) {
|
||||||
|
const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
|
||||||
|
if (textBeforeDesign) {
|
||||||
|
appendTextBlock(context, textBeforeDesign)
|
||||||
|
hasProcessedContent = true
|
||||||
|
}
|
||||||
|
context.isInDesignWorkflowBlock = true
|
||||||
|
context.designWorkflowContent = ''
|
||||||
|
contentToProcess = contentToProcess.substring(
|
||||||
|
designStartMatch.index + designStartMatch[0].length
|
||||||
|
)
|
||||||
|
hasProcessedContent = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
|
||||||
|
const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
|
||||||
|
const hasMark = nextMarkIndex >= 0
|
||||||
|
const hasCheck = nextCheckIndex >= 0
|
||||||
|
|
||||||
|
const nextTagIndex =
|
||||||
|
hasMark && hasCheck
|
||||||
|
? Math.min(nextMarkIndex, nextCheckIndex)
|
||||||
|
: hasMark
|
||||||
|
? nextMarkIndex
|
||||||
|
: hasCheck
|
||||||
|
? nextCheckIndex
|
||||||
|
: -1
|
||||||
|
|
||||||
|
if (nextTagIndex >= 0) {
|
||||||
|
const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
|
||||||
|
const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
|
||||||
|
const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
|
||||||
|
const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)
|
||||||
|
|
||||||
|
if (closingIndex === -1) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const todoId = contentToProcess
|
||||||
|
.substring(nextTagIndex + tagStart.length, closingIndex)
|
||||||
|
.trim()
|
||||||
|
logger.info(
|
||||||
|
isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
|
||||||
|
{ todoId }
|
||||||
|
)
|
||||||
|
|
||||||
|
if (todoId) {
|
||||||
|
try {
|
||||||
|
get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
|
||||||
|
logger.info(
|
||||||
|
isMarkTodo
|
||||||
|
? '[TODO] Successfully marked todo in progress'
|
||||||
|
: '[TODO] Successfully checked off todo',
|
||||||
|
{ todoId }
|
||||||
|
)
|
||||||
|
} catch (e) {
|
||||||
|
logger.error(
|
||||||
|
isMarkTodo
|
||||||
|
? '[TODO] Failed to mark todo in progress'
|
||||||
|
: '[TODO] Failed to checkoff todo',
|
||||||
|
{ todoId, error: e }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
|
||||||
|
}
|
||||||
|
|
||||||
|
let beforeTag = contentToProcess.substring(0, nextTagIndex)
|
||||||
|
let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)
|
||||||
|
|
||||||
|
const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
|
||||||
|
const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)
|
||||||
|
|
||||||
|
beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
|
||||||
|
afterTag = afterTag.replace(/^(\r?\n)+/, '')
|
||||||
|
|
||||||
|
contentToProcess =
|
||||||
|
beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
|
||||||
|
context.currentTextBlock = null
|
||||||
|
hasProcessedContent = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (context.isInThinkingBlock) {
|
||||||
|
const endMatch = thinkingEndRegex.exec(contentToProcess)
|
||||||
|
if (endMatch) {
|
||||||
|
const thinkingContent = contentToProcess.substring(0, endMatch.index)
|
||||||
|
appendThinkingContent(context, thinkingContent)
|
||||||
|
finalizeThinkingBlock(context)
|
||||||
|
contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
|
||||||
|
hasProcessedContent = true
|
||||||
|
} else {
|
||||||
|
const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
|
||||||
|
if (text) {
|
||||||
|
appendThinkingContent(context, text)
|
||||||
|
hasProcessedContent = true
|
||||||
|
}
|
||||||
|
contentToProcess = remaining
|
||||||
|
if (remaining) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const startMatch = thinkingStartRegex.exec(contentToProcess)
|
||||||
|
if (startMatch) {
|
||||||
|
const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
|
||||||
|
if (textBeforeThinking) {
|
||||||
|
appendTextBlock(context, textBeforeThinking)
|
||||||
|
hasProcessedContent = true
|
||||||
|
}
|
||||||
|
context.isInThinkingBlock = true
|
||||||
|
context.currentTextBlock = null
|
||||||
|
contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
|
||||||
|
hasProcessedContent = true
|
||||||
|
} else {
|
||||||
|
let partialTagIndex = contentToProcess.lastIndexOf('<')
|
||||||
|
|
||||||
|
const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
|
||||||
|
const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')
|
||||||
|
|
||||||
|
if (partialMarkTodo > partialTagIndex) {
|
||||||
|
partialTagIndex = partialMarkTodo
|
||||||
|
}
|
||||||
|
if (partialCheckoffTodo > partialTagIndex) {
|
||||||
|
partialTagIndex = partialCheckoffTodo
|
||||||
|
}
|
||||||
|
|
||||||
|
let textToAdd = contentToProcess
|
||||||
|
let remaining = ''
|
||||||
|
if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
|
||||||
|
textToAdd = contentToProcess.substring(0, partialTagIndex)
|
||||||
|
remaining = contentToProcess.substring(partialTagIndex)
|
||||||
|
}
|
||||||
|
if (textToAdd) {
|
||||||
|
appendTextBlock(context, textToAdd)
|
||||||
|
hasProcessedContent = true
|
||||||
|
}
|
||||||
|
contentToProcess = remaining
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
context.pendingContent = contentToProcess
|
||||||
|
if (hasProcessedContent) {
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
done: (_data, context) => {
|
||||||
|
logger.info('[SSE] DONE EVENT RECEIVED', {
|
||||||
|
doneEventCount: context.doneEventCount,
|
||||||
|
data: _data,
|
||||||
|
})
|
||||||
|
context.doneEventCount++
|
||||||
|
if (context.doneEventCount >= 1) {
|
||||||
|
logger.info('[SSE] Setting streamComplete = true, stream will terminate')
|
||||||
|
context.streamComplete = true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
error: (data, context, _get, set) => {
|
||||||
|
logger.error('Stream error:', data.error)
|
||||||
|
set((state: CopilotStore) => ({
|
||||||
|
messages: state.messages.map((msg) =>
|
||||||
|
msg.id === context.messageId
|
||||||
|
? {
|
||||||
|
...msg,
|
||||||
|
content: context.accumulatedContent || 'An error occurred.',
|
||||||
|
error: data.error,
|
||||||
|
}
|
||||||
|
: msg
|
||||||
|
),
|
||||||
|
}))
|
||||||
|
context.streamComplete = true
|
||||||
|
},
|
||||||
|
stream_end: (_data, context, _get, set) => {
|
||||||
|
if (context.pendingContent) {
|
||||||
|
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
||||||
|
appendThinkingContent(context, context.pendingContent)
|
||||||
|
} else if (context.pendingContent.trim()) {
|
||||||
|
appendTextBlock(context, context.pendingContent)
|
||||||
|
}
|
||||||
|
context.pendingContent = ''
|
||||||
|
}
|
||||||
|
finalizeThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
},
|
||||||
|
default: () => {},
|
||||||
|
}
|
||||||
3
apps/sim/lib/copilot/client-sse/index.ts
Normal file
@@ -0,0 +1,3 @@
export { sseHandlers } from './handlers'
export { subAgentSSEHandlers, applySseEvent } from './subagent-handlers'
export type { SSEHandler } from './handlers'
373
apps/sim/lib/copilot/client-sse/subagent-handlers.ts
Normal file
@@ -0,0 +1,373 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import {
|
||||||
|
asRecord,
|
||||||
|
normalizeSseEvent,
|
||||||
|
shouldSkipToolCallEvent,
|
||||||
|
shouldSkipToolResultEvent,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
|
||||||
|
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import type { ClientStreamingContext } from './types'
|
||||||
|
import { sseHandlers, type SSEHandler, updateStreamingMessage } from './handlers'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotClientSubagentHandlers')
|
||||||
|
|
||||||
|
type StoreSet = (
|
||||||
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
|
) => void
|
||||||
|
|
||||||
|
export function appendSubAgentContent(
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
parentToolCallId: string,
|
||||||
|
text: string
|
||||||
|
) {
|
||||||
|
if (!context.subAgentContent[parentToolCallId]) {
|
||||||
|
context.subAgentContent[parentToolCallId] = ''
|
||||||
|
}
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
context.subAgentBlocks[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
context.subAgentContent[parentToolCallId] += text
|
||||||
|
const blocks = context.subAgentBlocks[parentToolCallId]
|
||||||
|
const lastBlock = blocks[blocks.length - 1]
|
||||||
|
if (lastBlock && lastBlock.type === 'subagent_text') {
|
||||||
|
lastBlock.content = (lastBlock.content || '') + text
|
||||||
|
} else {
|
||||||
|
blocks.push({
|
||||||
|
type: 'subagent_text',
|
||||||
|
content: text,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function updateToolCallWithSubAgentData(
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
get: () => CopilotStore,
|
||||||
|
set: StoreSet,
|
||||||
|
parentToolCallId: string
|
||||||
|
) {
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const parentToolCall = toolCallsById[parentToolCallId]
|
||||||
|
if (!parentToolCall) {
|
||||||
|
logger.warn('[SubAgent] updateToolCallWithSubAgentData: parent tool call not found', {
|
||||||
|
parentToolCallId,
|
||||||
|
availableToolCallIds: Object.keys(toolCallsById),
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = context.subAgentBlocks[parentToolCallId] ?? []
|
||||||
|
|
||||||
|
const updatedToolCall: CopilotToolCall = {
|
||||||
|
...parentToolCall,
|
||||||
|
subAgentContent: context.subAgentContent[parentToolCallId] || '',
|
||||||
|
subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] ?? [],
|
||||||
|
subAgentBlocks: blocks,
|
||||||
|
subAgentStreaming: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[SubAgent] Updating tool call with subagent data', {
|
||||||
|
parentToolCallId,
|
||||||
|
parentToolName: parentToolCall.name,
|
||||||
|
subAgentContentLength: updatedToolCall.subAgentContent?.length,
|
||||||
|
subAgentBlocksCount: updatedToolCall.subAgentBlocks?.length,
|
||||||
|
subAgentToolCallsCount: updatedToolCall.subAgentToolCalls?.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall }
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
|
||||||
|
let foundInContentBlocks = false
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b.type === 'tool_call' && b.toolCall?.id === parentToolCallId) {
|
||||||
|
context.contentBlocks[i] = { ...b, toolCall: updatedToolCall }
|
||||||
|
foundInContentBlocks = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!foundInContentBlocks) {
|
||||||
|
logger.warn('[SubAgent] Parent tool call not found in contentBlocks', {
|
||||||
|
parentToolCallId,
|
||||||
|
contentBlocksCount: context.contentBlocks.length,
|
||||||
|
toolCallBlockIds: context.contentBlocks
|
||||||
|
.filter((b) => b.type === 'tool_call')
|
||||||
|
.map((b) => b.toolCall?.id),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||||
|
start: () => {
|
||||||
|
// Subagent start event - no action needed, parent is already tracked from subagent_start
|
||||||
|
},
|
||||||
|
|
||||||
|
content: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
const contentStr = typeof data.data === 'string' ? data.data : (data.content || '')
|
||||||
|
logger.info('[SubAgent] content event', {
|
||||||
|
parentToolCallId,
|
||||||
|
hasData: !!contentStr,
|
||||||
|
dataPreview: contentStr ? contentStr.substring(0, 50) : null,
|
||||||
|
})
|
||||||
|
if (!parentToolCallId || !contentStr) {
|
||||||
|
logger.warn('[SubAgent] content missing parentToolCallId or data', {
|
||||||
|
parentToolCallId,
|
||||||
|
hasData: !!contentStr,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
appendSubAgentContent(context, parentToolCallId, contentStr)
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
reasoning: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
const dataObj = asRecord(data?.data)
|
||||||
|
const phase = data?.phase || (dataObj.phase as string | undefined)
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
if (phase === 'start' || phase === 'end') return
|
||||||
|
|
||||||
|
const chunk = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||||
|
if (!chunk) return
|
||||||
|
|
||||||
|
appendSubAgentContent(context, parentToolCallId, chunk)
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_generating: () => {
|
||||||
|
// Tool generating event - no action needed, we'll handle the actual tool_call
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_call: async (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
const toolData = asRecord(data?.data)
|
||||||
|
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
||||||
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
|
if (!id || !name) return
|
||||||
|
const isPartial = toolData.partial === true
|
||||||
|
|
||||||
|
let args: Record<string, unknown> | undefined =
|
||||||
|
(toolData.arguments || toolData.input) as Record<string, unknown> | undefined
|
||||||
|
|
||||||
|
if (typeof args === 'string') {
|
||||||
|
try {
|
||||||
|
args = JSON.parse(args) as Record<string, unknown>
|
||||||
|
} catch {
|
||||||
|
logger.warn('[SubAgent] Failed to parse arguments string', { args })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[SubAgent] tool_call received', {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
hasArgs: !!args,
|
||||||
|
argsKeys: args ? Object.keys(args) : [],
|
||||||
|
toolDataKeys: Object.keys(toolData),
|
||||||
|
dataKeys: Object.keys(data ?? {}),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!context.subAgentToolCalls[parentToolCallId]) {
|
||||||
|
context.subAgentToolCalls[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
context.subAgentBlocks[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
|
(tc: CopilotToolCall) => tc.id === id
|
||||||
|
)
|
||||||
|
const subAgentToolCall: CopilotToolCall = {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
state: ClientToolCallState.pending,
|
||||||
|
...(args ? { params: args } : {}),
|
||||||
|
display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = subAgentToolCall
|
||||||
|
} else {
|
||||||
|
context.subAgentToolCalls[parentToolCallId].push(subAgentToolCall)
|
||||||
|
|
||||||
|
context.subAgentBlocks[parentToolCallId].push({
|
||||||
|
type: 'subagent_tool_call',
|
||||||
|
toolCall: subAgentToolCall,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const updated = { ...toolCallsById, [id]: subAgentToolCall }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
|
||||||
|
if (isPartial) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_result: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
const resultData = asRecord(data?.data)
|
||||||
|
const toolCallId: string | undefined = data?.toolCallId || (resultData.id as string | undefined)
|
||||||
|
const success: boolean | undefined = data?.success !== false
|
||||||
|
if (!toolCallId) return
|
||||||
|
|
||||||
|
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||||
|
|
||||||
|
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
||||||
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
|
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||||
|
)
|
||||||
|
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
|
const updatedSubAgentToolCall = {
|
||||||
|
...existing,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||||
|
}
|
||||||
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||||
|
|
||||||
|
for (const block of context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
if (block.type === 'subagent_tool_call' && block.toolCall?.id === toolCallId) {
|
||||||
|
block.toolCall = updatedSubAgentToolCall
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
if (toolCallsById[toolCallId]) {
|
||||||
|
const updatedMap = {
|
||||||
|
...toolCallsById,
|
||||||
|
[toolCallId]: updatedSubAgentToolCall,
|
||||||
|
}
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
logger.info('[SubAgent] Updated subagent tool call state in toolCallsById', {
|
||||||
|
toolCallId,
|
||||||
|
name: existing.name,
|
||||||
|
state: targetState,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
done: (_data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function applySseEvent(
|
||||||
|
rawData: SSEEvent,
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
get: () => CopilotStore,
|
||||||
|
set: (next: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
|
||||||
|
): Promise<boolean> {
|
||||||
|
const normalizedEvent = normalizeSseEvent(rawData)
|
||||||
|
if (shouldSkipToolCallEvent(normalizedEvent) || shouldSkipToolResultEvent(normalizedEvent)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
const data = normalizedEvent
|
||||||
|
|
||||||
|
if (data.type === 'subagent_start') {
|
||||||
|
const startData = asRecord(data.data)
|
||||||
|
const toolCallId = startData.tool_call_id as string | undefined
|
||||||
|
if (toolCallId) {
|
||||||
|
context.subAgentParentToolCallId = toolCallId
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const parentToolCall = toolCallsById[toolCallId]
|
||||||
|
if (parentToolCall) {
|
||||||
|
const updatedToolCall: CopilotToolCall = {
|
||||||
|
...parentToolCall,
|
||||||
|
subAgentStreaming: true,
|
||||||
|
}
|
||||||
|
const updatedMap = { ...toolCallsById, [toolCallId]: updatedToolCall }
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
}
|
||||||
|
logger.info('[SSE] Subagent session started', {
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId: toolCallId,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.type === 'subagent_end') {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (parentToolCallId) {
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const parentToolCall = toolCallsById[parentToolCallId]
|
||||||
|
if (parentToolCall) {
|
||||||
|
const updatedToolCall: CopilotToolCall = {
|
||||||
|
...parentToolCall,
|
||||||
|
subAgentContent: context.subAgentContent[parentToolCallId] || '',
|
||||||
|
subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] ?? [],
|
||||||
|
subAgentBlocks: context.subAgentBlocks[parentToolCallId] ?? [],
|
||||||
|
subAgentStreaming: false,
|
||||||
|
}
|
||||||
|
const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall }
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
logger.info('[SSE] Subagent session ended', {
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId,
|
||||||
|
contentLength: context.subAgentContent[parentToolCallId]?.length || 0,
|
||||||
|
toolCallCount: context.subAgentToolCalls[parentToolCallId]?.length || 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
context.subAgentParentToolCallId = undefined
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.subagent) {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) {
|
||||||
|
logger.warn('[SSE] Subagent event without parent tool call ID', {
|
||||||
|
type: data.type,
|
||||||
|
subagent: data.subagent,
|
||||||
|
})
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[SSE] Processing subagent event', {
|
||||||
|
type: data.type,
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId,
|
||||||
|
hasHandler: !!subAgentSSEHandlers[data.type],
|
||||||
|
})
|
||||||
|
|
||||||
|
const subAgentHandler = subAgentSSEHandlers[data.type]
|
||||||
|
if (subAgentHandler) {
|
||||||
|
await subAgentHandler(data, context, get, set)
|
||||||
|
} else {
|
||||||
|
logger.warn('[SSE] No handler for subagent event type', { type: data.type })
|
||||||
|
}
|
||||||
|
return !context.streamComplete
|
||||||
|
}
|
||||||
|
|
||||||
|
const handler = sseHandlers[data.type] || sseHandlers.default
|
||||||
|
await handler(data, context, get, set)
|
||||||
|
return !context.streamComplete
|
||||||
|
}
|
||||||
41
apps/sim/lib/copilot/client-sse/types.ts
Normal file
@@ -0,0 +1,41 @@
import type { ChatContext, CopilotToolCall, SubAgentContentBlock } from '@/stores/panel/copilot/types'

/**
 * A content block used in copilot messages and during streaming.
 * Uses a literal type union for `type` to stay compatible with CopilotMessage.
 */
export type ContentBlockType = 'text' | 'thinking' | 'tool_call' | 'contexts'

export interface ClientContentBlock {
  type: ContentBlockType
  content?: string
  timestamp: number
  toolCall?: CopilotToolCall | null
  startTime?: number
  duration?: number
  contexts?: ChatContext[]
}

export interface StreamingContext {
  messageId: string
  accumulatedContent: string
  contentBlocks: ClientContentBlock[]
  currentTextBlock: ClientContentBlock | null
  isInThinkingBlock: boolean
  currentThinkingBlock: ClientContentBlock | null
  isInDesignWorkflowBlock: boolean
  designWorkflowContent: string
  pendingContent: string
  newChatId?: string
  doneEventCount: number
  streamComplete?: boolean
  wasAborted?: boolean
  suppressContinueOption?: boolean
  subAgentParentToolCallId?: string
  subAgentContent: Record<string, string>
  subAgentToolCalls: Record<string, CopilotToolCall[]>
  subAgentBlocks: Record<string, SubAgentContentBlock[]>
  suppressStreamingUpdates?: boolean
}

export type ClientStreamingContext = StreamingContext
@@ -1,2 +1,115 @@
import { env } from '@/lib/core/config/env'

export const SIM_AGENT_API_URL_DEFAULT = 'https://copilot.sim.ai'
export const SIM_AGENT_VERSION = '1.0.3'

/** Resolved copilot backend URL — reads from env with fallback to default. */
const rawAgentUrl = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export const SIM_AGENT_API_URL =
  rawAgentUrl.startsWith('http://') || rawAgentUrl.startsWith('https://')
    ? rawAgentUrl
    : SIM_AGENT_API_URL_DEFAULT

// ---------------------------------------------------------------------------
// Redis key prefixes
// ---------------------------------------------------------------------------

/** Redis key prefix for tool call confirmation payloads (polled by waitForToolDecision). */
export const REDIS_TOOL_CALL_PREFIX = 'tool_call:'

/** Redis key prefix for copilot SSE stream buffers. */
export const REDIS_COPILOT_STREAM_PREFIX = 'copilot_stream:'

// ---------------------------------------------------------------------------
// Timeouts
// ---------------------------------------------------------------------------

/** Default timeout for the copilot orchestration stream loop (5 min). */
export const ORCHESTRATION_TIMEOUT_MS = 300_000

/** Timeout for the client-side streaming response handler (10 min). */
export const STREAM_TIMEOUT_MS = 600_000

/** TTL for Redis tool call confirmation entries (24 h). */
export const REDIS_TOOL_CALL_TTL_SECONDS = 86_400

// ---------------------------------------------------------------------------
// Tool decision polling
// ---------------------------------------------------------------------------

/** Initial poll interval when waiting for a user tool decision. */
export const TOOL_DECISION_INITIAL_POLL_MS = 100

/** Maximum poll interval when waiting for a user tool decision. */
export const TOOL_DECISION_MAX_POLL_MS = 3_000

/** Backoff multiplier for the tool decision poll interval. */
export const TOOL_DECISION_POLL_BACKOFF = 1.5

// ---------------------------------------------------------------------------
// Stream resume
// ---------------------------------------------------------------------------

/** Maximum number of resume attempts before giving up. */
export const MAX_RESUME_ATTEMPTS = 3

/** SessionStorage key for persisting active stream metadata across page reloads. */
export const STREAM_STORAGE_KEY = 'copilot_active_stream'

// ---------------------------------------------------------------------------
// Client-side streaming batching
// ---------------------------------------------------------------------------

/** Delay (ms) before processing the next queued message after stream completion. */
export const QUEUE_PROCESS_DELAY_MS = 100

/** Delay (ms) before invalidating subscription queries after stream completion. */
export const SUBSCRIPTION_INVALIDATE_DELAY_MS = 1_000

// ---------------------------------------------------------------------------
// UI helpers
// ---------------------------------------------------------------------------

/** Maximum character length for an optimistic chat title derived from a user message. */
export const OPTIMISTIC_TITLE_MAX_LENGTH = 50

// ---------------------------------------------------------------------------
// Copilot API paths (client-side fetch targets)
// ---------------------------------------------------------------------------

/** POST — send a chat message to the copilot. */
export const COPILOT_CHAT_API_PATH = '/api/copilot/chat'

/** GET — resume/replay a copilot SSE stream. */
export const COPILOT_CHAT_STREAM_API_PATH = '/api/copilot/chat/stream'

/** POST — persist chat messages / plan artifact / config. */
export const COPILOT_UPDATE_MESSAGES_API_PATH = '/api/copilot/chat/update-messages'

/** DELETE — delete a copilot chat. */
export const COPILOT_DELETE_CHAT_API_PATH = '/api/copilot/chat/delete'

/** POST — confirm or reject a tool call. */
export const COPILOT_CONFIRM_API_PATH = '/api/copilot/confirm'

/** POST — forward diff-accepted/rejected stats to the copilot backend. */
export const COPILOT_STATS_API_PATH = '/api/copilot/stats'

/** GET — load checkpoints for a chat. */
export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'

/** POST — revert to a checkpoint. */
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'

/** GET/POST/DELETE — manage auto-allowed tools. */
export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'

/** GET — fetch user credentials for masking. */
export const COPILOT_CREDENTIALS_API_PATH = '/api/copilot/credentials'

// ---------------------------------------------------------------------------
// Dedup limits
// ---------------------------------------------------------------------------

/** Maximum entries in the in-memory SSE tool-event dedup cache. */
export const STREAM_BUFFER_MAX_DEDUP_ENTRIES = 1_000
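The three tool-decision polling constants above describe an exponential backoff. A rough sketch of how they compose follows; the real waitForToolDecision loop is not part of this diff, so this is only an assumed shape:

import {
  TOOL_DECISION_INITIAL_POLL_MS,
  TOOL_DECISION_MAX_POLL_MS,
  TOOL_DECISION_POLL_BACKOFF,
} from '@/lib/copilot/constants'

// Hypothetical illustration of the poll-interval progression implied by the constants.
const intervals: number[] = []
let pollMs = TOOL_DECISION_INITIAL_POLL_MS
while (pollMs < TOOL_DECISION_MAX_POLL_MS) {
  intervals.push(pollMs)
  pollMs = Math.min(pollMs * TOOL_DECISION_POLL_BACKOFF, TOOL_DECISION_MAX_POLL_MS)
}
intervals.push(TOOL_DECISION_MAX_POLL_MS)
// intervals ≈ [100, 150, 225, 337.5, ..., 3000] milliseconds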
129
apps/sim/lib/copilot/messages/checkpoints.ts
Normal file
@@ -0,0 +1,129 @@
import { createLogger } from '@sim/logger'
import { COPILOT_CHECKPOINTS_API_PATH } from '@/lib/copilot/constants'
import { mergeSubblockState } from '@/stores/workflows/utils'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { CopilotMessage, CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'

const logger = createLogger('CopilotMessageCheckpoints')

export function buildCheckpointWorkflowState(workflowId: string): WorkflowState | null {
  const rawState = useWorkflowStore.getState().getWorkflowState()
  if (!rawState) return null

  const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, workflowId)

  const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
    (acc, [blockId, block]) => {
      if (block?.type && block?.name) {
        acc[blockId] = {
          ...block,
          id: block.id || blockId,
          enabled: block.enabled !== undefined ? block.enabled : true,
          horizontalHandles: block.horizontalHandles !== undefined ? block.horizontalHandles : true,
          height: block.height !== undefined ? block.height : 90,
          subBlocks: block.subBlocks ?? {},
          outputs: block.outputs ?? {},
          data: block.data ?? {},
          position: block.position || { x: 0, y: 0 },
        }
      }
      return acc
    },
    {} as WorkflowState['blocks']
  )

  return {
    blocks: filteredBlocks,
    edges: rawState.edges ?? [],
    loops: rawState.loops ?? {},
    parallels: rawState.parallels ?? {},
    lastSaved: rawState.lastSaved || Date.now(),
    deploymentStatuses: rawState.deploymentStatuses ?? {},
  }
}

export async function saveMessageCheckpoint(
  messageId: string,
  get: () => CopilotStore,
  set: (partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
): Promise<boolean> {
  const { workflowId, currentChat, messageSnapshots, messageCheckpoints } = get()
  if (!workflowId || !currentChat?.id) return false

  const snapshot = messageSnapshots[messageId]
  if (!snapshot) return false

  const nextSnapshots = { ...messageSnapshots }
  delete nextSnapshots[messageId]
  set({ messageSnapshots: nextSnapshots })

  try {
    const response = await fetch(COPILOT_CHECKPOINTS_API_PATH, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        workflowId,
        chatId: currentChat.id,
        messageId,
        workflowState: JSON.stringify(snapshot),
      }),
    })

    if (!response.ok) {
      throw new Error(`Failed to create checkpoint: ${response.statusText}`)
    }

    const result = await response.json()
    const newCheckpoint = result.checkpoint
    if (newCheckpoint) {
      const existingCheckpoints = messageCheckpoints[messageId] ?? []
      const updatedCheckpoints = {
        ...messageCheckpoints,
        [messageId]: [newCheckpoint, ...existingCheckpoints],
      }
      set({ messageCheckpoints: updatedCheckpoints })
    }

    return true
  } catch (error) {
    logger.error('Failed to create checkpoint from snapshot:', error)
    return false
  }
}

export function extractToolCallsRecursively(
  toolCall: CopilotToolCall,
  map: Record<string, CopilotToolCall>
): void {
  if (!toolCall?.id) return
  map[toolCall.id] = toolCall

  if (Array.isArray(toolCall.subAgentBlocks)) {
    for (const block of toolCall.subAgentBlocks) {
      if (block?.type === 'subagent_tool_call' && block.toolCall?.id) {
        extractToolCallsRecursively(block.toolCall, map)
      }
    }
  }

  if (Array.isArray(toolCall.subAgentToolCalls)) {
    for (const subTc of toolCall.subAgentToolCalls) {
      extractToolCallsRecursively(subTc, map)
    }
  }
}

export function buildToolCallsById(messages: CopilotMessage[]): Record<string, CopilotToolCall> {
  const toolCallsById: Record<string, CopilotToolCall> = {}
  for (const msg of messages) {
    if (msg.contentBlocks) {
      for (const block of msg.contentBlocks) {
        if (block?.type === 'tool_call' && block.toolCall?.id) {
          extractToolCallsRecursively(block.toolCall, toolCallsById)
        }
      }
    }
  }
  return toolCallsById
}
31 apps/sim/lib/copilot/messages/credential-masking.ts Normal file
@@ -0,0 +1,31 @@
export function maskCredentialIdsInValue<T>(value: T, credentialIds: Set<string>): T {
  if (!value || credentialIds.size === 0) return value

  if (typeof value === 'string') {
    let masked = value as string
    const sortedIds = Array.from(credentialIds).sort((a, b) => b.length - a.length)
    for (const id of sortedIds) {
      if (id && masked.includes(id)) {
        masked = masked.split(id).join('••••••••')
      }
    }
    return masked as unknown as T
  }

  if (Array.isArray(value)) {
    return value.map((item) => maskCredentialIdsInValue(item, credentialIds)) as T
  }

  if (typeof value === 'object') {
    const masked: Record<string, unknown> = {}
    for (const key of Object.keys(value as Record<string, unknown>)) {
      masked[key] = maskCredentialIdsInValue(
        (value as Record<string, unknown>)[key],
        credentialIds
      )
    }
    return masked as T
  }

  return value
}
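For reference, a minimal usage sketch of maskCredentialIdsInValue (illustrative only, not part of the diff; the credential IDs and payload are made up). Note that IDs are masked longest-first so an ID that is a prefix of another cannot leave a partial leak:

import { maskCredentialIdsInValue } from '@/lib/copilot/messages/credential-masking'

// Hypothetical credential IDs and payload.
const credentialIds = new Set(['cred_abc123', 'cred_abc'])
const payload = { note: 'uses cred_abc123 and cred_abc', nested: ['cred_abc'] }

const masked = maskCredentialIdsInValue(payload, credentialIds)
// => { note: 'uses •••••••• and ••••••••', nested: ['••••••••'] }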
4 apps/sim/lib/copilot/messages/index.ts Normal file
@@ -0,0 +1,4 @@
export * from './credential-masking'
export * from './serialization'
export * from './checkpoints'
export * from './persist'
43 apps/sim/lib/copilot/messages/persist.ts Normal file
@@ -0,0 +1,43 @@
import { createLogger } from '@sim/logger'
import { COPILOT_UPDATE_MESSAGES_API_PATH } from '@/lib/copilot/constants'
import type { CopilotMessage } from '@/stores/panel/copilot/types'
import { serializeMessagesForDB } from './serialization'

const logger = createLogger('CopilotMessagePersistence')

export async function persistMessages(params: {
  chatId: string
  messages: CopilotMessage[]
  sensitiveCredentialIds?: Set<string>
  planArtifact?: string | null
  mode?: string
  model?: string
  conversationId?: string
}): Promise<boolean> {
  try {
    const dbMessages = serializeMessagesForDB(
      params.messages,
      params.sensitiveCredentialIds ?? new Set<string>()
    )
    const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        chatId: params.chatId,
        messages: dbMessages,
        ...(params.planArtifact !== undefined ? { planArtifact: params.planArtifact } : {}),
        ...(params.mode || params.model
          ? { config: { mode: params.mode, model: params.model } }
          : {}),
        ...(params.conversationId ? { conversationId: params.conversationId } : {}),
      }),
    })
    return response.ok
  } catch (error) {
    logger.warn('Failed to persist messages', {
      chatId: params.chatId,
      error: error instanceof Error ? error.message : String(error),
    })
    return false
  }
}
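A hedged usage sketch of persistMessages (not part of the diff; the chat id, message fields, and mode value are placeholders, and the message literal is trimmed to the fields this hunk shows on CopilotMessage):

import { persistMessages } from '@/lib/copilot/messages/persist'

// Illustrative call from the client store; persistMessages only reports a boolean.
const ok = await persistMessages({
  chatId: 'chat-123',
  messages: [
    { id: 'msg-1', role: 'user', content: 'Hello', timestamp: new Date().toISOString() },
  ] as CopilotMessage[],
  sensitiveCredentialIds: new Set(['cred_abc123']),
  mode: 'build',
})
if (!ok) {
  // Failure is already logged inside persistMessages; callers decide whether to retry.
}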
172 apps/sim/lib/copilot/messages/serialization.ts Normal file
@@ -0,0 +1,172 @@
import { createLogger } from '@sim/logger'
import type { CopilotMessage, CopilotToolCall } from '@/stores/panel/copilot/types'
import { maskCredentialIdsInValue } from './credential-masking'

const logger = createLogger('CopilotMessageSerialization')

export function clearStreamingFlags(toolCall: CopilotToolCall): void {
  if (!toolCall) return

  toolCall.subAgentStreaming = false

  if (Array.isArray(toolCall.subAgentBlocks)) {
    for (const block of toolCall.subAgentBlocks) {
      if (block?.type === 'subagent_tool_call' && block.toolCall) {
        clearStreamingFlags(block.toolCall)
      }
    }
  }
  if (Array.isArray(toolCall.subAgentToolCalls)) {
    for (const subTc of toolCall.subAgentToolCalls) {
      clearStreamingFlags(subTc)
    }
  }
}

export function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
  try {
    for (const message of messages) {
      if (message.role === 'assistant') {
        logger.debug('[normalizeMessagesForUI] Loading assistant message', {
          id: message.id,
          hasContent: !!message.content?.trim(),
          contentBlockCount: message.contentBlocks?.length || 0,
          contentBlockTypes: message.contentBlocks?.map((b) => b?.type) ?? [],
        })
      }
    }

    for (const message of messages) {
      if (message.contentBlocks) {
        for (const block of message.contentBlocks) {
          if (block?.type === 'tool_call' && block.toolCall) {
            clearStreamingFlags(block.toolCall)
          }
        }
      }
      if (message.toolCalls) {
        for (const toolCall of message.toolCalls) {
          clearStreamingFlags(toolCall)
        }
      }
    }
    return messages
  } catch (error) {
    logger.warn('[normalizeMessagesForUI] Failed to normalize messages', {
      error: error instanceof Error ? error.message : String(error),
    })
    return messages
  }
}

export function deepClone<T>(obj: T): T {
  try {
    const json = JSON.stringify(obj)
    if (!json || json === 'undefined') {
      logger.warn('[deepClone] JSON.stringify returned empty for object', {
        type: typeof obj,
        isArray: Array.isArray(obj),
        length: Array.isArray(obj) ? obj.length : undefined,
      })
      return obj
    }
    const parsed = JSON.parse(json)
    if (Array.isArray(obj) && (!Array.isArray(parsed) || parsed.length !== obj.length)) {
      logger.warn('[deepClone] Array clone mismatch', {
        originalLength: obj.length,
        clonedLength: Array.isArray(parsed) ? parsed.length : 'not array',
      })
    }
    return parsed
  } catch (err) {
    logger.error('[deepClone] Failed to clone object', {
      error: String(err),
      type: typeof obj,
      isArray: Array.isArray(obj),
    })
    return obj
  }
}

export function serializeMessagesForDB(
  messages: CopilotMessage[],
  credentialIds: Set<string>
): CopilotMessage[] {
  const result = messages
    .map((msg) => {
      let timestamp: string = msg.timestamp
      if (typeof timestamp !== 'string') {
        const ts = timestamp as unknown
        timestamp = ts instanceof Date ? ts.toISOString() : new Date().toISOString()
      }

      const serialized: CopilotMessage = {
        id: msg.id,
        role: msg.role,
        content: msg.content || '',
        timestamp,
      }

      if (Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0) {
        serialized.contentBlocks = deepClone(msg.contentBlocks)
      }

      if (Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0) {
        serialized.toolCalls = deepClone(msg.toolCalls)
      }

      if (Array.isArray(msg.fileAttachments) && msg.fileAttachments.length > 0) {
        serialized.fileAttachments = deepClone(msg.fileAttachments)
      }

      if (Array.isArray(msg.contexts) && msg.contexts.length > 0) {
        serialized.contexts = deepClone(msg.contexts)
      }

      if (Array.isArray(msg.citations) && msg.citations.length > 0) {
        serialized.citations = deepClone(msg.citations)
      }

      if (msg.errorType) {
        serialized.errorType = msg.errorType
      }

      return maskCredentialIdsInValue(serialized, credentialIds)
    })
    .filter((msg) => {
      if (msg.role === 'assistant') {
        const hasContent = typeof msg.content === 'string' && msg.content.trim().length > 0
        const hasTools = Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0
        const hasBlocks = Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0
        return hasContent || hasTools || hasBlocks
      }
      return true
    })

  for (const msg of messages) {
    if (msg.role === 'assistant') {
      logger.debug('[serializeMessagesForDB] Input assistant message', {
        id: msg.id,
        hasContent: !!msg.content?.trim(),
        contentBlockCount: msg.contentBlocks?.length || 0,
        contentBlockTypes: msg.contentBlocks?.map((b) => b?.type) ?? [],
      })
    }
  }

  logger.debug('[serializeMessagesForDB] Serialized messages', {
    inputCount: messages.length,
    outputCount: result.length,
    sample:
      result.length > 0
        ? {
            role: result[result.length - 1].role,
            hasContent: !!result[result.length - 1].content,
            contentBlockCount: result[result.length - 1].contentBlocks?.length || 0,
            toolCallCount: result[result.length - 1].toolCalls?.length || 0,
          }
        : null,
  })

  return result
}
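A small illustrative sketch of serializeMessagesForDB behavior (not part of the diff; the ids are placeholders and the `as any` cast sidesteps fields of CopilotMessage this hunk does not show). Empty assistant messages are filtered out before persistence and non-string timestamps are normalized to ISO strings:

import { serializeMessagesForDB } from '@/lib/copilot/messages/serialization'

const out = serializeMessagesForDB(
  [
    { id: 'u1', role: 'user', content: 'hi', timestamp: new Date().toISOString() },
    { id: 'a1', role: 'assistant', content: '', timestamp: new Date() },
  ] as any,
  new Set<string>()
)
// out keeps the user message; the content-less assistant message is dropped.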
63 apps/sim/lib/copilot/orchestrator/config.ts Normal file
@@ -0,0 +1,63 @@
export const INTERRUPT_TOOL_NAMES = [
  'set_global_workflow_variables',
  'run_workflow',
  'manage_mcp_tool',
  'manage_custom_tool',
  'deploy_mcp',
  'deploy_chat',
  'deploy_api',
  'create_workspace_mcp_server',
  'set_environment_variables',
  'make_api_request',
  'oauth_request_access',
  'navigate_ui',
  'knowledge_base',
] as const

export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)

export const SUBAGENT_TOOL_NAMES = [
  'debug',
  'edit',
  'build',
  'plan',
  'test',
  'deploy',
  'auth',
  'research',
  'knowledge',
  'custom_tool',
  'tour',
  'info',
  'workflow',
  'evaluate',
  'superagent',
  'discovery',
] as const

export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)

/**
 * Respond tools are internal to the copilot's subagent system.
 * They're used by subagents to signal completion and should NOT be executed by the sim side.
 * The copilot backend handles these internally.
 */
export const RESPOND_TOOL_NAMES = [
  'plan_respond',
  'edit_respond',
  'build_respond',
  'debug_respond',
  'info_respond',
  'research_respond',
  'deploy_respond',
  'superagent_respond',
  'discovery_respond',
  'tour_respond',
  'auth_respond',
  'workflow_respond',
  'knowledge_respond',
  'custom_tool_respond',
  'test_respond',
] as const

export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
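An illustrative helper (not part of the diff) mirroring how the SSE handlers branch on these sets; classifyTool is a hypothetical name used only to show the lookup order:

import {
  INTERRUPT_TOOL_SET,
  RESPOND_TOOL_SET,
  SUBAGENT_TOOL_SET,
} from '@/lib/copilot/orchestrator/config'

function classifyTool(name: string): 'respond' | 'subagent' | 'interrupt' | 'regular' {
  if (RESPOND_TOOL_SET.has(name)) return 'respond' // marked successful locally, never executed
  if (SUBAGENT_TOOL_SET.has(name)) return 'subagent' // executed by the copilot backend
  if (INTERRUPT_TOOL_SET.has(name)) return 'interrupt' // waits for a user decision when interactive
  return 'regular'
}

classifyTool('run_workflow') // => 'interrupt'
classifyTool('plan_respond') // => 'respond'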
70 apps/sim/lib/copilot/orchestrator/index.ts Normal file
@@ -0,0 +1,70 @@
import { createLogger } from '@sim/logger'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
import type { OrchestratorOptions, OrchestratorResult } from '@/lib/copilot/orchestrator/types'
import { env } from '@/lib/core/config/env'
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'

const logger = createLogger('CopilotOrchestrator')

export interface OrchestrateStreamOptions extends OrchestratorOptions {
  userId: string
  workflowId: string
  chatId?: string
}

export async function orchestrateCopilotStream(
  requestPayload: Record<string, unknown>,
  options: OrchestrateStreamOptions
): Promise<OrchestratorResult> {
  const { userId, workflowId, chatId } = options
  const execContext = await prepareExecutionContext(userId, workflowId)

  const payloadMsgId = requestPayload?.messageId
  const context = createStreamingContext({
    chatId,
    messageId: typeof payloadMsgId === 'string' ? payloadMsgId : crypto.randomUUID(),
  })

  try {
    await runStreamLoop(
      `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
        },
        body: JSON.stringify(requestPayload),
      },
      context,
      execContext,
      options
    )

    const result: OrchestratorResult = {
      success: context.errors.length === 0,
      content: context.accumulatedContent,
      contentBlocks: context.contentBlocks,
      toolCalls: buildToolCallSummaries(context),
      chatId: context.chatId,
      conversationId: context.conversationId,
      errors: context.errors.length ? context.errors : undefined,
    }
    await options.onComplete?.(result)
    return result
  } catch (error) {
    const err = error instanceof Error ? error : new Error('Copilot orchestration failed')
    logger.error('Copilot orchestration failed', { error: err.message })
    await options.onError?.(err)
    return {
      success: false,
      content: '',
      contentBlocks: [],
      toolCalls: [],
      chatId: context.chatId,
      conversationId: context.conversationId,
      error: err.message,
    }
  }
}
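A hedged invocation sketch (not part of the diff; the ids, the payload fields, and the callback body are placeholders, and the exact request payload shape is whatever the copilot backend expects):

import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'

const result = await orchestrateCopilotStream(
  { messageId: crypto.randomUUID(), message: 'Add a webhook trigger' },
  {
    userId: 'user-1',
    workflowId: 'workflow-1',
    interactive: false,
    onEvent: async (event) => {
      // e.g. forward each SSE event to the connected client
    },
  }
)
if (!result.success) {
  // result.error / result.errors carry what went wrong
}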
29 apps/sim/lib/copilot/orchestrator/persistence.ts Normal file
@@ -0,0 +1,29 @@
import { createLogger } from '@sim/logger'
import { REDIS_TOOL_CALL_PREFIX } from '@/lib/copilot/constants'
import { getRedisClient } from '@/lib/core/config/redis'

const logger = createLogger('CopilotOrchestratorPersistence')

/**
 * Get a tool call confirmation status from Redis.
 */
export async function getToolConfirmation(toolCallId: string): Promise<{
  status: string
  message?: string
  timestamp?: string
} | null> {
  const redis = getRedisClient()
  if (!redis) return null

  try {
    const data = await redis.get(`${REDIS_TOOL_CALL_PREFIX}${toolCallId}`)
    if (!data) return null
    return JSON.parse(data) as { status: string; message?: string; timestamp?: string }
  } catch (error) {
    logger.error('Failed to read tool confirmation', {
      toolCallId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}
95 apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts Normal file
@@ -0,0 +1,95 @@
/**
 * @vitest-environment node
 */

import { loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/logger', () => loggerMock)

const executeToolServerSide = vi.fn()
const markToolComplete = vi.fn()

vi.mock('@/lib/copilot/orchestrator/tool-executor', () => ({
  executeToolServerSide,
  markToolComplete,
}))

import { sseHandlers } from '@/lib/copilot/orchestrator/sse-handlers'
import type { ExecutionContext, StreamingContext } from '@/lib/copilot/orchestrator/types'

describe('sse-handlers tool lifecycle', () => {
  let context: StreamingContext
  let execContext: ExecutionContext

  beforeEach(() => {
    vi.clearAllMocks()
    context = {
      chatId: undefined,
      conversationId: undefined,
      messageId: 'msg-1',
      accumulatedContent: '',
      contentBlocks: [],
      toolCalls: new Map(),
      currentThinkingBlock: null,
      isInThinkingBlock: false,
      subAgentParentToolCallId: undefined,
      subAgentContent: {},
      subAgentToolCalls: {},
      pendingContent: '',
      streamComplete: false,
      wasAborted: false,
      errors: [],
    }
    execContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }
  })

  it('executes tool_call and emits tool_result + mark-complete', async () => {
    executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
    markToolComplete.mockResolvedValueOnce(true)
    const onEvent = vi.fn()

    await sseHandlers.tool_call(
      {
        type: 'tool_call',
        data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
      } as any,
      context,
      execContext,
      { onEvent, interactive: false, timeout: 1000 }
    )

    expect(executeToolServerSide).toHaveBeenCalledTimes(1)
    expect(markToolComplete).toHaveBeenCalledTimes(1)
    expect(onEvent).toHaveBeenCalledWith(
      expect.objectContaining({
        type: 'tool_result',
        toolCallId: 'tool-1',
        success: true,
      })
    )

    const updated = context.toolCalls.get('tool-1')
    expect(updated?.status).toBe('success')
    expect(updated?.result?.output).toEqual({ ok: true })
  })

  it('skips duplicate tool_call after result', async () => {
    executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
    markToolComplete.mockResolvedValueOnce(true)

    const event = {
      type: 'tool_call',
      data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
    }

    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })

    expect(executeToolServerSide).toHaveBeenCalledTimes(1)
    expect(markToolComplete).toHaveBeenCalledTimes(1)
  })
})
407 apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts Normal file
@@ -0,0 +1,407 @@
import { createLogger } from '@sim/logger'
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
import {
  asRecord,
  getEventData,
  markToolResultSeen,
  wasToolResultSeen,
} from '@/lib/copilot/orchestrator/sse-utils'
import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
import type {
  ContentBlock,
  ExecutionContext,
  OrchestratorOptions,
  SSEEvent,
  StreamingContext,
  ToolCallState,
} from '@/lib/copilot/orchestrator/types'
import { executeToolAndReport, isInterruptToolName, waitForToolDecision } from './tool-execution'

const logger = createLogger('CopilotSseHandlers')

// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.

function inferToolSuccess(data: Record<string, unknown> | undefined): {
  success: boolean
  hasResultData: boolean
  hasError: boolean
} {
  const resultObj = asRecord(data?.result)
  const hasExplicitSuccess = data?.success !== undefined || resultObj.success !== undefined
  const explicitSuccess = data?.success ?? resultObj.success
  const hasResultData = data?.result !== undefined || data?.data !== undefined
  const hasError = !!data?.error || !!resultObj.error
  const success = hasExplicitSuccess ? !!explicitSuccess : hasResultData && !hasError
  return { success, hasResultData, hasError }
}

export type SSEHandler = (
  event: SSEEvent,
  context: StreamingContext,
  execContext: ExecutionContext,
  options: OrchestratorOptions
) => void | Promise<void>

function addContentBlock(context: StreamingContext, block: Omit<ContentBlock, 'timestamp'>): void {
  context.contentBlocks.push({
    ...block,
    timestamp: Date.now(),
  })
}

export const sseHandlers: Record<string, SSEHandler> = {
  chat_id: (event, context) => {
    context.chatId = asRecord(event.data).chatId as string | undefined
  },
  title_updated: () => {},
  tool_result: (event, context) => {
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return
    const current = context.toolCalls.get(toolCallId)
    if (!current) return

    const { success, hasResultData, hasError } = inferToolSuccess(data)

    current.status = success ? 'success' : 'error'
    current.endTime = Date.now()
    if (hasResultData) {
      current.result = {
        success,
        output: data?.result || data?.data,
      }
    }
    if (hasError) {
      const resultObj = asRecord(data?.result)
      current.error = (data?.error || resultObj.error) as string | undefined
    }
  },
  tool_error: (event, context) => {
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return
    const current = context.toolCalls.get(toolCallId)
    if (!current) return
    current.status = 'error'
    current.error = (data?.error as string | undefined) || 'Tool execution failed'
    current.endTime = Date.now()
  },
  tool_generating: (event, context) => {
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.toolCallId as string | undefined) || (data?.id as string | undefined)
    const toolName = event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
    if (!toolCallId || !toolName) return
    if (!context.toolCalls.has(toolCallId)) {
      context.toolCalls.set(toolCallId, {
        id: toolCallId,
        name: toolName,
        status: 'pending',
        startTime: Date.now(),
      })
    }
  },
  tool_call: async (event, context, execContext, options) => {
    const toolData = getEventData(event) || ({} as Record<string, unknown>)
    const toolCallId = (toolData.id as string | undefined) || event.toolCallId
    const toolName = (toolData.name as string | undefined) || event.toolName
    if (!toolCallId || !toolName) return

    const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record<string, unknown> | undefined
    const isPartial = toolData.partial === true
    const existing = context.toolCalls.get(toolCallId)

    // If we've already completed this tool call, ignore late/duplicate tool_call events
    // to avoid resetting UI/state back to pending and re-executing.
    if (
      existing?.endTime ||
      (existing && existing.status !== 'pending' && existing.status !== 'executing')
    ) {
      if (!existing.params && args) {
        existing.params = args
      }
      return
    }

    if (existing) {
      if (args && !existing.params) existing.params = args
    } else {
      context.toolCalls.set(toolCallId, {
        id: toolCallId,
        name: toolName,
        status: 'pending',
        params: args,
        startTime: Date.now(),
      })
      const created = context.toolCalls.get(toolCallId)!
      addContentBlock(context, { type: 'tool_call', toolCall: created })
    }

    if (isPartial) return
    if (wasToolResultSeen(toolCallId)) return

    const toolCall = context.toolCalls.get(toolCallId)
    if (!toolCall) return

    // Subagent tools are executed by the copilot backend, not sim side.
    if (SUBAGENT_TOOL_SET.has(toolName)) {
      return
    }

    // Respond tools are internal to copilot's subagent system - skip execution.
    // The copilot backend handles these internally to signal subagent completion.
    if (RESPOND_TOOL_SET.has(toolName)) {
      toolCall.status = 'success'
      toolCall.endTime = Date.now()
      toolCall.result = {
        success: true,
        output: 'Internal respond tool - handled by copilot backend',
      }
      return
    }

    const isInterruptTool = isInterruptToolName(toolName)
    const isInteractive = options.interactive === true

    if (isInterruptTool && isInteractive) {
      const decision = await waitForToolDecision(toolCallId, options.timeout || STREAM_TIMEOUT_MS, options.abortSignal)
      if (decision?.status === 'accepted' || decision?.status === 'success') {
        await executeToolAndReport(toolCallId, context, execContext, options)
        return
      }

      if (decision?.status === 'rejected' || decision?.status === 'error') {
        toolCall.status = 'rejected'
        toolCall.endTime = Date.now()
        await markToolComplete(
          toolCall.id,
          toolCall.name,
          400,
          decision.message || 'Tool execution rejected',
          { skipped: true, reason: 'user_rejected' }
        )
        markToolResultSeen(toolCall.id)
        await options.onEvent?.({
          type: 'tool_result',
          toolCallId: toolCall.id,
          data: {
            id: toolCall.id,
            name: toolCall.name,
            success: false,
            result: { skipped: true, reason: 'user_rejected' },
          },
        })
        return
      }

      if (decision?.status === 'background') {
        toolCall.status = 'skipped'
        toolCall.endTime = Date.now()
        await markToolComplete(
          toolCall.id,
          toolCall.name,
          202,
          decision.message || 'Tool execution moved to background',
          { background: true }
        )
        markToolResultSeen(toolCall.id)
        await options.onEvent?.({
          type: 'tool_result',
          toolCallId: toolCall.id,
          data: {
            id: toolCall.id,
            name: toolCall.name,
            success: true,
            result: { background: true },
          },
        })
        return
      }
    }

    if (options.autoExecuteTools !== false) {
      await executeToolAndReport(toolCallId, context, execContext, options)
    }
  },
  reasoning: (event, context) => {
    const d = asRecord(event.data)
    const phase = d.phase || asRecord(d.data).phase
    if (phase === 'start') {
      context.isInThinkingBlock = true
      context.currentThinkingBlock = {
        type: 'thinking',
        content: '',
        timestamp: Date.now(),
      }
      return
    }
    if (phase === 'end') {
      if (context.currentThinkingBlock) {
        context.contentBlocks.push(context.currentThinkingBlock)
      }
      context.isInThinkingBlock = false
      context.currentThinkingBlock = null
      return
    }
    const chunk = (d.data || d.content || event.content) as string | undefined
    if (!chunk || !context.currentThinkingBlock) return
    context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}`
  },
  content: (event, context) => {
    // Go backend sends content as a plain string in event.data, not wrapped in an object.
    let chunk: string | undefined
    if (typeof event.data === 'string') {
      chunk = event.data
    } else {
      const d = asRecord(event.data)
      chunk = (d.content || d.data || event.content) as string | undefined
    }
    if (!chunk) return
    context.accumulatedContent += chunk
    addContentBlock(context, { type: 'text', content: chunk })
  },
  done: (event, context) => {
    const d = asRecord(event.data)
    if (d.responseId) {
      context.conversationId = d.responseId as string
    }
    context.streamComplete = true
  },
  start: (event, context) => {
    const d = asRecord(event.data)
    if (d.responseId) {
      context.conversationId = d.responseId as string
    }
  },
  error: (event, context) => {
    const d = asRecord(event.data)
    const message = (d.message || d.error || event.error) as string | undefined
    if (message) {
      context.errors.push(message)
    }
    context.streamComplete = true
  },
}

export const subAgentHandlers: Record<string, SSEHandler> = {
  content: (event, context) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId || !event.data) return
    // Go backend sends content as a plain string in event.data
    let chunk: string | undefined
    if (typeof event.data === 'string') {
      chunk = event.data
    } else {
      const d = asRecord(event.data)
      chunk = (d.content || d.data || event.content) as string | undefined
    }
    if (!chunk) return
    context.subAgentContent[parentToolCallId] =
      (context.subAgentContent[parentToolCallId] || '') + chunk
    addContentBlock(context, { type: 'subagent_text', content: chunk })
  },
  tool_call: async (event, context, execContext, options) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId) return
    const toolData = getEventData(event) || ({} as Record<string, unknown>)
    const toolCallId = (toolData.id as string | undefined) || event.toolCallId
    const toolName = (toolData.name as string | undefined) || event.toolName
    if (!toolCallId || !toolName) return
    const isPartial = toolData.partial === true
    const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as Record<string, unknown> | undefined

    const existing = context.toolCalls.get(toolCallId)
    // Ignore late/duplicate tool_call events once we already have a result.
    if (wasToolResultSeen(toolCallId) || existing?.endTime) {
      return
    }

    const toolCall: ToolCallState = {
      id: toolCallId,
      name: toolName,
      status: 'pending',
      params: args,
      startTime: Date.now(),
    }

    // Store in both places - but do NOT overwrite existing tool call state for the same id.
    if (!context.subAgentToolCalls[parentToolCallId]) {
      context.subAgentToolCalls[parentToolCallId] = []
    }
    if (!context.subAgentToolCalls[parentToolCallId].some((tc) => tc.id === toolCallId)) {
      context.subAgentToolCalls[parentToolCallId].push(toolCall)
    }
    if (!context.toolCalls.has(toolCallId)) {
      context.toolCalls.set(toolCallId, toolCall)
    }

    if (isPartial) return

    // Respond tools are internal to copilot's subagent system - skip execution.
    if (RESPOND_TOOL_SET.has(toolName)) {
      toolCall.status = 'success'
      toolCall.endTime = Date.now()
      toolCall.result = {
        success: true,
        output: 'Internal respond tool - handled by copilot backend',
      }
      return
    }

    if (options.autoExecuteTools !== false) {
      await executeToolAndReport(toolCallId, context, execContext, options)
    }
  },
  tool_result: (event, context) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId) return
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return

    // Update in subAgentToolCalls.
    const toolCalls = context.subAgentToolCalls[parentToolCallId] || []
    const subAgentToolCall = toolCalls.find((tc) => tc.id === toolCallId)

    // Also update in main toolCalls (where we added it for execution).
    const mainToolCall = context.toolCalls.get(toolCallId)

    const { success, hasResultData, hasError } = inferToolSuccess(data)

    const status = success ? 'success' : 'error'
    const endTime = Date.now()
    const result = hasResultData ? { success, output: data?.result || data?.data } : undefined

    if (subAgentToolCall) {
      subAgentToolCall.status = status
      subAgentToolCall.endTime = endTime
      if (result) subAgentToolCall.result = result
      if (hasError) {
        const resultObj = asRecord(data?.result)
        subAgentToolCall.error = (data?.error || resultObj.error) as string | undefined
      }
    }

    if (mainToolCall) {
      mainToolCall.status = status
      mainToolCall.endTime = endTime
      if (result) mainToolCall.result = result
      if (hasError) {
        const resultObj = asRecord(data?.result)
        mainToolCall.error = (data?.error || resultObj.error) as string | undefined
      }
    }
  },
}

export function handleSubagentRouting(event: SSEEvent, context: StreamingContext): boolean {
  if (!event.subagent) return false
  if (!context.subAgentParentToolCallId) {
    logger.warn('Subagent event missing parent tool call', {
      type: event.type,
      subagent: event.subagent,
    })
    return false
  }
  return true
}
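A minimal dispatch sketch (not part of the diff) showing how a stream loop might route events between the two handler tables; the actual loop lives in stream-core, which is outside this hunk, so the function name dispatch and its wiring are illustrative:

import {
  handleSubagentRouting,
  sseHandlers,
  subAgentHandlers,
} from '@/lib/copilot/orchestrator/sse-handlers'
import type {
  ExecutionContext,
  OrchestratorOptions,
  SSEEvent,
  StreamingContext,
} from '@/lib/copilot/orchestrator/types'

async function dispatch(
  event: SSEEvent,
  context: StreamingContext,
  execContext: ExecutionContext,
  options: OrchestratorOptions
) {
  // Subagent-tagged events go to subAgentHandlers; everything else to sseHandlers.
  const table = handleSubagentRouting(event, context) ? subAgentHandlers : sseHandlers
  const handler = table[event.type]
  if (handler) await handler(event, context, execContext, options)
}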
2 apps/sim/lib/copilot/orchestrator/sse-handlers/index.ts Normal file
@@ -0,0 +1,2 @@
export type { SSEHandler } from './handlers'
export { handleSubagentRouting, sseHandlers, subAgentHandlers } from './handlers'
127 apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts Normal file
@@ -0,0 +1,127 @@
import { createLogger } from '@sim/logger'
import {
  TOOL_DECISION_INITIAL_POLL_MS,
  TOOL_DECISION_MAX_POLL_MS,
  TOOL_DECISION_POLL_BACKOFF,
} from '@/lib/copilot/constants'
import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
import {
  asRecord,
  markToolResultSeen,
  wasToolResultSeen,
} from '@/lib/copilot/orchestrator/sse-utils'
import { executeToolServerSide, markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
import type {
  ExecutionContext,
  OrchestratorOptions,
  SSEEvent,
  StreamingContext,
} from '@/lib/copilot/orchestrator/types'

const logger = createLogger('CopilotSseToolExecution')

export function isInterruptToolName(toolName: string): boolean {
  return INTERRUPT_TOOL_SET.has(toolName)
}

export async function executeToolAndReport(
  toolCallId: string,
  context: StreamingContext,
  execContext: ExecutionContext,
  options?: OrchestratorOptions
): Promise<void> {
  const toolCall = context.toolCalls.get(toolCallId)
  if (!toolCall) return

  if (toolCall.status === 'executing') return
  if (wasToolResultSeen(toolCall.id)) return

  toolCall.status = 'executing'
  try {
    const result = await executeToolServerSide(toolCall, execContext)
    toolCall.status = result.success ? 'success' : 'error'
    toolCall.result = result
    toolCall.error = result.error
    toolCall.endTime = Date.now()

    // If create_workflow was successful, update the execution context with the new workflowId.
    // This ensures subsequent tools in the same stream have access to the workflowId.
    const output = asRecord(result.output)
    if (
      toolCall.name === 'create_workflow' &&
      result.success &&
      output.workflowId &&
      !execContext.workflowId
    ) {
      execContext.workflowId = output.workflowId as string
      if (output.workspaceId) {
        execContext.workspaceId = output.workspaceId as string
      }
    }

    markToolResultSeen(toolCall.id)

    await markToolComplete(
      toolCall.id,
      toolCall.name,
      result.success ? 200 : 500,
      result.error || (result.success ? 'Tool completed' : 'Tool failed'),
      result.output
    )

    const resultEvent: SSEEvent = {
      type: 'tool_result',
      toolCallId: toolCall.id,
      toolName: toolCall.name,
      success: result.success,
      result: result.output,
      data: {
        id: toolCall.id,
        name: toolCall.name,
        success: result.success,
        result: result.output,
      },
    }
    await options?.onEvent?.(resultEvent)
  } catch (error) {
    toolCall.status = 'error'
    toolCall.error = error instanceof Error ? error.message : String(error)
    toolCall.endTime = Date.now()

    markToolResultSeen(toolCall.id)

    await markToolComplete(toolCall.id, toolCall.name, 500, toolCall.error)

    const errorEvent: SSEEvent = {
      type: 'tool_error',
      toolCallId: toolCall.id,
      data: {
        id: toolCall.id,
        name: toolCall.name,
        error: toolCall.error,
      },
    }
    await options?.onEvent?.(errorEvent)
  }
}

export async function waitForToolDecision(
  toolCallId: string,
  timeoutMs: number,
  abortSignal?: AbortSignal
): Promise<{ status: string; message?: string } | null> {
  const start = Date.now()
  let interval = TOOL_DECISION_INITIAL_POLL_MS
  const maxInterval = TOOL_DECISION_MAX_POLL_MS
  while (Date.now() - start < timeoutMs) {
    if (abortSignal?.aborted) return null
    const decision = await getToolConfirmation(toolCallId)
    if (decision?.status) {
      return decision
    }
    await new Promise((resolve) => setTimeout(resolve, interval))
    interval = Math.min(interval * TOOL_DECISION_POLL_BACKOFF, maxInterval)
  }
  return null
}
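A hedged usage sketch of waitForToolDecision (not part of the diff; the tool call id and 30s timeout are placeholders, and the concrete poll constants live in '@/lib/copilot/constants', which this hunk does not show). The poll interval grows geometrically from TOOL_DECISION_INITIAL_POLL_MS by TOOL_DECISION_POLL_BACKOFF until it caps at TOOL_DECISION_MAX_POLL_MS:

import { waitForToolDecision } from '@/lib/copilot/orchestrator/sse-handlers/tool-execution'

const controller = new AbortController()
// Poll Redis (via getToolConfirmation) for up to 30s, bailing out if the stream is aborted.
const decision = await waitForToolDecision('tool-1', 30_000, controller.signal)
if (decision?.status === 'accepted') {
  // proceed with executeToolAndReport(...)
}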
71 apps/sim/lib/copilot/orchestrator/sse-parser.ts Normal file
@@ -0,0 +1,71 @@
import { createLogger } from '@sim/logger'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'

const logger = createLogger('CopilotSseParser')

/**
 * Parses SSE streams from the copilot backend into typed events.
 */
export async function* parseSSEStream(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  decoder: TextDecoder,
  abortSignal?: AbortSignal
): AsyncGenerator<SSEEvent> {
  let buffer = ''

  try {
    while (true) {
      if (abortSignal?.aborted) {
        logger.info('SSE stream aborted by signal')
        break
      }

      const { done, value } = await reader.read()
      if (done) break

      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      buffer = lines.pop() || ''

      for (const line of lines) {
        if (!line.trim()) continue
        if (!line.startsWith('data: ')) continue

        const jsonStr = line.slice(6)
        if (jsonStr === '[DONE]') continue

        try {
          const event = JSON.parse(jsonStr) as SSEEvent
          if (event?.type) {
            yield event
          }
        } catch (error) {
          logger.warn('Failed to parse SSE event', {
            preview: jsonStr.slice(0, 200),
            error: error instanceof Error ? error.message : String(error),
          })
        }
      }
    }

    if (buffer.trim() && buffer.startsWith('data: ')) {
      try {
        const event = JSON.parse(buffer.slice(6)) as SSEEvent
        if (event?.type) {
          yield event
        }
      } catch (error) {
        logger.warn('Failed to parse final SSE buffer', {
          preview: buffer.slice(0, 200),
          error: error instanceof Error ? error.message : String(error),
        })
      }
    }
  } finally {
    try {
      reader.releaseLock()
    } catch {
      logger.warn('Failed to release SSE reader lock')
    }
  }
}
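An illustrative consumer of parseSSEStream (not part of the diff; the URL and request body are placeholders), showing that the generator yields one typed event per `data:` line and releases the reader when iteration stops:

import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser'

const response = await fetch('https://agent.example.invalid/api/chat-completion-streaming', {
  method: 'POST',
  body: JSON.stringify({ message: 'hi' }),
})
const reader = response.body!.getReader()
for await (const event of parseSSEStream(reader, new TextDecoder())) {
  if (event.type === 'content') {
    // append event.data to the UI
  }
}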
42 apps/sim/lib/copilot/orchestrator/sse-utils.test.ts Normal file
@@ -0,0 +1,42 @@
/**
 * @vitest-environment node
 */
import { describe, expect, it } from 'vitest'
import {
  normalizeSseEvent,
  shouldSkipToolCallEvent,
  shouldSkipToolResultEvent,
} from '@/lib/copilot/orchestrator/sse-utils'

describe('sse-utils', () => {
  it.concurrent('normalizes tool fields from string data', () => {
    const event = {
      type: 'tool_result',
      data: JSON.stringify({
        id: 'tool_1',
        name: 'edit_workflow',
        success: true,
        result: { ok: true },
      }),
    }

    const normalized = normalizeSseEvent(event as any)

    expect(normalized.toolCallId).toBe('tool_1')
    expect(normalized.toolName).toBe('edit_workflow')
    expect(normalized.success).toBe(true)
    expect(normalized.result).toEqual({ ok: true })
  })

  it.concurrent('dedupes tool_call events', () => {
    const event = { type: 'tool_call', data: { id: 'tool_call_1', name: 'plan' } }
    expect(shouldSkipToolCallEvent(event as any)).toBe(false)
    expect(shouldSkipToolCallEvent(event as any)).toBe(true)
  })

  it.concurrent('dedupes tool_result events', () => {
    const event = { type: 'tool_result', data: { id: 'tool_result_1', name: 'plan' } }
    expect(shouldSkipToolResultEvent(event as any)).toBe(false)
    expect(shouldSkipToolResultEvent(event as any)).toBe(true)
  })
})
120 apps/sim/lib/copilot/orchestrator/sse-utils.ts Normal file
@@ -0,0 +1,120 @@
import { STREAM_BUFFER_MAX_DEDUP_ENTRIES } from '@/lib/copilot/constants'
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'

type EventDataObject = Record<string, unknown> | undefined

/** Safely cast event.data to a record for property access. */
export const asRecord = (data: unknown): Record<string, unknown> =>
  (data && typeof data === 'object' && !Array.isArray(data) ? data : {}) as Record<string, unknown>

/**
 * In-memory tool event dedupe with bounded size.
 *
 * NOTE: Process-local only. In a multi-instance setup (e.g., ECS),
 * each task maintains its own dedupe cache.
 */
const seenToolCalls = new Set<string>()
const seenToolResults = new Set<string>()

function addToSet(set: Set<string>, id: string): void {
  if (set.size >= STREAM_BUFFER_MAX_DEDUP_ENTRIES) {
    const first = set.values().next().value
    if (first) set.delete(first)
  }
  set.add(id)
}

const parseEventData = (data: unknown): EventDataObject => {
  if (!data) return undefined
  if (typeof data !== 'string') {
    return data as EventDataObject
  }
  try {
    return JSON.parse(data) as EventDataObject
  } catch {
    return undefined
  }
}

const hasToolFields = (data: EventDataObject): boolean => {
  if (!data) return false
  return (
    data.id !== undefined ||
    data.toolCallId !== undefined ||
    data.name !== undefined ||
    data.success !== undefined ||
    data.result !== undefined ||
    data.arguments !== undefined
  )
}

export const getEventData = (event: SSEEvent): EventDataObject => {
  const topLevel = parseEventData(event.data)
  if (!topLevel) return undefined
  if (hasToolFields(topLevel)) return topLevel
  const nested = parseEventData(topLevel.data)
  return nested || topLevel
}

function getToolCallIdFromEvent(event: SSEEvent): string | undefined {
  const data = getEventData(event)
  return event.toolCallId || (data?.id as string | undefined) || (data?.toolCallId as string | undefined)
}

/** Normalizes SSE events so tool metadata is available at the top level. */
export function normalizeSseEvent(event: SSEEvent): SSEEvent {
  if (!event) return event
  const data = getEventData(event)
  if (!data) return event
  const toolCallId = event.toolCallId || (data.id as string | undefined) || (data.toolCallId as string | undefined)
  const toolName = event.toolName || (data.name as string | undefined) || (data.toolName as string | undefined)
  const success = event.success ?? (data.success as boolean | undefined)
  const result = event.result ?? data.result
  const normalizedData = typeof event.data === 'string' ? data : event.data
  return {
    ...event,
    data: normalizedData,
    toolCallId,
    toolName,
    success,
    result,
  }
}

function markToolCallSeen(toolCallId: string): void {
  addToSet(seenToolCalls, toolCallId)
}

function wasToolCallSeen(toolCallId: string): boolean {
  return seenToolCalls.has(toolCallId)
}

export function markToolResultSeen(toolCallId: string): void {
  addToSet(seenToolResults, toolCallId)
}

export function wasToolResultSeen(toolCallId: string): boolean {
  return seenToolResults.has(toolCallId)
}

export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
  if (event.type !== 'tool_call') return false
  const toolCallId = getToolCallIdFromEvent(event)
  if (!toolCallId) return false
  const eventData = getEventData(event)
  if (eventData?.partial === true) return false
  if (wasToolResultSeen(toolCallId) || wasToolCallSeen(toolCallId)) {
    return true
  }
  markToolCallSeen(toolCallId)
  return false
}

export function shouldSkipToolResultEvent(event: SSEEvent): boolean {
  if (event.type !== 'tool_result') return false
  const toolCallId = getToolCallIdFromEvent(event)
  if (!toolCallId) return false
  if (wasToolResultSeen(toolCallId)) return true
  markToolResultSeen(toolCallId)
  return false
}
119 apps/sim/lib/copilot/orchestrator/stream-buffer.test.ts Normal file
@@ -0,0 +1,119 @@
/**
 * @vitest-environment node
 */

import { loggerMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('@sim/logger', () => loggerMock)

type StoredEntry = { score: number; value: string }

const createRedisStub = () => {
  const events = new Map<string, StoredEntry[]>()
  const counters = new Map<string, number>()

  const readEntries = (key: string, min: number, max: number) => {
    const list = events.get(key) || []
    return list
      .filter((entry) => entry.score >= min && entry.score <= max)
      .sort((a, b) => a.score - b.score)
      .map((entry) => entry.value)
  }

  return {
    del: vi.fn().mockResolvedValue(1),
    hset: vi.fn().mockResolvedValue(1),
    hgetall: vi.fn().mockResolvedValue({}),
    expire: vi.fn().mockResolvedValue(1),
    eval: vi
      .fn()
      .mockImplementation(
        (
          _lua: string,
          _keysCount: number,
          seqKey: string,
          eventsKey: string,
          _ttl: number,
          _limit: number,
          streamId: string,
          eventJson: string
        ) => {
          const current = counters.get(seqKey) || 0
          const next = current + 1
          counters.set(seqKey, next)
          const entry = JSON.stringify({ eventId: next, streamId, event: JSON.parse(eventJson) })
          const list = events.get(eventsKey) || []
          list.push({ score: next, value: entry })
          events.set(eventsKey, list)
          return next
        }
      ),
    incrby: vi.fn().mockImplementation((key: string, amount: number) => {
      const current = counters.get(key) || 0
      const next = current + amount
      counters.set(key, next)
      return next
    }),
    zrangebyscore: vi.fn().mockImplementation((key: string, min: string, max: string) => {
      const minVal = Number(min)
      const maxVal = max === '+inf' ? Number.POSITIVE_INFINITY : Number(max)
      return Promise.resolve(readEntries(key, minVal, maxVal))
    }),
    pipeline: vi.fn().mockImplementation(() => {
      const api: Record<string, any> = {}
      api.zadd = vi.fn().mockImplementation((key: string, ...args: Array<string | number>) => {
        const list = events.get(key) || []
        for (let i = 0; i < args.length; i += 2) {
          list.push({ score: Number(args[i]), value: String(args[i + 1]) })
        }
        events.set(key, list)
        return api
      })
      api.expire = vi.fn().mockReturnValue(api)
      api.zremrangebyrank = vi.fn().mockReturnValue(api)
      api.exec = vi.fn().mockResolvedValue([])
      return api
    }),
  }
}

let mockRedis: ReturnType<typeof createRedisStub>

vi.mock('@/lib/core/config/redis', () => ({
  getRedisClient: () => mockRedis,
}))

import {
  appendStreamEvent,
  createStreamEventWriter,
  readStreamEvents,
} from '@/lib/copilot/orchestrator/stream-buffer'

describe('stream-buffer', () => {
  beforeEach(() => {
    mockRedis = createRedisStub()
    vi.clearAllMocks()
  })

  it.concurrent('replays events after a given event id', async () => {
    await appendStreamEvent('stream-1', { type: 'content', data: 'hello' })
    await appendStreamEvent('stream-1', { type: 'content', data: 'world' })

    const allEvents = await readStreamEvents('stream-1', 0)
    expect(allEvents.map((entry) => entry.event.data)).toEqual(['hello', 'world'])

    const replayed = await readStreamEvents('stream-1', 1)
    expect(replayed.map((entry) => entry.event.data)).toEqual(['world'])
  })

  it.concurrent('flushes buffered events for resume', async () => {
    const writer = createStreamEventWriter('stream-2')
    await writer.write({ type: 'content', data: 'a' })
    await writer.write({ type: 'content', data: 'b' })
    await writer.flush()

    const events = await readStreamEvents('stream-2', 0)
    expect(events.map((entry) => entry.event.data)).toEqual(['a', 'b'])
  })
})
309
apps/sim/lib/copilot/orchestrator/stream-buffer.ts
Normal file
@@ -0,0 +1,309 @@
import { createLogger } from '@sim/logger'
import { REDIS_COPILOT_STREAM_PREFIX } from '@/lib/copilot/constants'
import { env } from '@/lib/core/config/env'
import { getRedisClient } from '@/lib/core/config/redis'

const logger = createLogger('CopilotStreamBuffer')

const STREAM_DEFAULTS = {
  ttlSeconds: 60 * 60,
  eventLimit: 5000,
  reserveBatch: 200,
  flushIntervalMs: 15,
  flushMaxBatch: 200,
}

export type StreamBufferConfig = {
  ttlSeconds: number
  eventLimit: number
  reserveBatch: number
  flushIntervalMs: number
  flushMaxBatch: number
}

const parseNumber = (value: number | string | undefined, fallback: number): number => {
  if (typeof value === 'number' && Number.isFinite(value)) return value
  const parsed = Number(value)
  return Number.isFinite(parsed) ? parsed : fallback
}

export function getStreamBufferConfig(): StreamBufferConfig {
  return {
    ttlSeconds: parseNumber(env.COPILOT_STREAM_TTL_SECONDS, STREAM_DEFAULTS.ttlSeconds),
    eventLimit: parseNumber(env.COPILOT_STREAM_EVENT_LIMIT, STREAM_DEFAULTS.eventLimit),
    reserveBatch: parseNumber(env.COPILOT_STREAM_RESERVE_BATCH, STREAM_DEFAULTS.reserveBatch),
    flushIntervalMs: parseNumber(
      env.COPILOT_STREAM_FLUSH_INTERVAL_MS,
      STREAM_DEFAULTS.flushIntervalMs
    ),
    flushMaxBatch: parseNumber(env.COPILOT_STREAM_FLUSH_MAX_BATCH, STREAM_DEFAULTS.flushMaxBatch),
  }
}

const APPEND_STREAM_EVENT_LUA = `
local seqKey = KEYS[1]
local eventsKey = KEYS[2]
local ttl = tonumber(ARGV[1])
local limit = tonumber(ARGV[2])
local streamId = ARGV[3]
local eventJson = ARGV[4]

local id = redis.call('INCR', seqKey)
local entry = '{"eventId":' .. id .. ',"streamId":' .. cjson.encode(streamId) .. ',"event":' .. eventJson .. '}'
redis.call('ZADD', eventsKey, id, entry)
redis.call('EXPIRE', eventsKey, ttl)
redis.call('EXPIRE', seqKey, ttl)
if limit > 0 then
  redis.call('ZREMRANGEBYRANK', eventsKey, 0, -limit-1)
end
return id
`

function getStreamKeyPrefix(streamId: string) {
  return `${REDIS_COPILOT_STREAM_PREFIX}${streamId}`
}

function getEventsKey(streamId: string) {
  return `${getStreamKeyPrefix(streamId)}:events`
}

function getSeqKey(streamId: string) {
  return `${getStreamKeyPrefix(streamId)}:seq`
}

function getMetaKey(streamId: string) {
  return `${getStreamKeyPrefix(streamId)}:meta`
}

export type StreamStatus = 'active' | 'complete' | 'error'

export type StreamMeta = {
  status: StreamStatus
  userId?: string
  updatedAt?: string
  error?: string
}

export type StreamEventEntry = {
  eventId: number
  streamId: string
  event: Record<string, unknown>
}

export type StreamEventWriter = {
  write: (event: Record<string, unknown>) => Promise<StreamEventEntry>
  flush: () => Promise<void>
  close: () => Promise<void>
}

export async function resetStreamBuffer(streamId: string): Promise<void> {
  const redis = getRedisClient()
  if (!redis) return
  try {
    await redis.del(getEventsKey(streamId), getSeqKey(streamId), getMetaKey(streamId))
  } catch (error) {
    logger.warn('Failed to reset stream buffer', {
      streamId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export async function setStreamMeta(streamId: string, meta: StreamMeta): Promise<void> {
  const redis = getRedisClient()
  if (!redis) return
  try {
    const config = getStreamBufferConfig()
    const payload: Record<string, string> = {
      status: meta.status,
      updatedAt: meta.updatedAt || new Date().toISOString(),
    }
    if (meta.userId) payload.userId = meta.userId
    if (meta.error) payload.error = meta.error
    await redis.hset(getMetaKey(streamId), payload)
    await redis.expire(getMetaKey(streamId), config.ttlSeconds)
  } catch (error) {
    logger.warn('Failed to update stream meta', {
      streamId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export async function getStreamMeta(streamId: string): Promise<StreamMeta | null> {
  const redis = getRedisClient()
  if (!redis) return null
  try {
    const meta = await redis.hgetall(getMetaKey(streamId))
    if (!meta || Object.keys(meta).length === 0) return null
    return meta as StreamMeta
  } catch (error) {
    logger.warn('Failed to read stream meta', {
      streamId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

export async function appendStreamEvent(
  streamId: string,
  event: Record<string, unknown>
): Promise<StreamEventEntry> {
  const redis = getRedisClient()
  if (!redis) {
    return { eventId: 0, streamId, event }
  }

  try {
    const config = getStreamBufferConfig()
    const eventJson = JSON.stringify(event)
    const nextId = await redis.eval(
      APPEND_STREAM_EVENT_LUA,
      2,
      getSeqKey(streamId),
      getEventsKey(streamId),
      config.ttlSeconds,
      config.eventLimit,
      streamId,
      eventJson
    )
    const eventId = typeof nextId === 'number' ? nextId : Number(nextId)
    return { eventId, streamId, event }
  } catch (error) {
    logger.warn('Failed to append stream event', {
      streamId,
      error: error instanceof Error ? error.message : String(error),
    })
    return { eventId: 0, streamId, event }
  }
}

export function createStreamEventWriter(streamId: string): StreamEventWriter {
  const redis = getRedisClient()
  if (!redis) {
    return {
      write: async (event) => ({ eventId: 0, streamId, event }),
      flush: async () => {},
      close: async () => {},
    }
  }

  const config = getStreamBufferConfig()
  let pending: StreamEventEntry[] = []
  let nextEventId = 0
  let maxReservedId = 0
  let flushTimer: ReturnType<typeof setTimeout> | null = null
  const scheduleFlush = () => {
    if (flushTimer) return
    flushTimer = setTimeout(() => {
      flushTimer = null
      void flush()
    }, config.flushIntervalMs)
  }

  const reserveIds = async (minCount: number) => {
    const reserveCount = Math.max(config.reserveBatch, minCount)
    const newMax = await redis.incrby(getSeqKey(streamId), reserveCount)
    const startId = newMax - reserveCount + 1
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      nextEventId = startId
      maxReservedId = newMax
    }
  }

  let flushPromise: Promise<void> | null = null
  let closed = false

  const doFlush = async () => {
    if (pending.length === 0) return
    const batch = pending
    pending = []
    try {
      const key = getEventsKey(streamId)
      const zaddArgs: (string | number)[] = []
      for (const entry of batch) {
        zaddArgs.push(entry.eventId, JSON.stringify(entry))
      }
      const pipeline = redis.pipeline()
      pipeline.zadd(key, ...(zaddArgs as [number, string]))
      pipeline.expire(key, config.ttlSeconds)
      pipeline.expire(getSeqKey(streamId), config.ttlSeconds)
      pipeline.zremrangebyrank(key, 0, -config.eventLimit - 1)
      await pipeline.exec()
    } catch (error) {
      logger.warn('Failed to flush stream events', {
        streamId,
        error: error instanceof Error ? error.message : String(error),
      })
      pending = batch.concat(pending)
    }
  }

  const flush = async () => {
    if (flushPromise) {
      await flushPromise
      return
    }
    flushPromise = doFlush()
    try {
      await flushPromise
    } finally {
      flushPromise = null
      if (pending.length > 0) scheduleFlush()
    }
  }

  const write = async (event: Record<string, unknown>) => {
    if (closed) return { eventId: 0, streamId, event }
    if (nextEventId === 0 || nextEventId > maxReservedId) {
      await reserveIds(1)
    }
    const eventId = nextEventId++
    const entry: StreamEventEntry = { eventId, streamId, event }
    pending.push(entry)
    if (pending.length >= config.flushMaxBatch) {
      await flush()
    } else {
      scheduleFlush()
    }
    return entry
  }

  const close = async () => {
    closed = true
    if (flushTimer) {
      clearTimeout(flushTimer)
      flushTimer = null
    }
    await flush()
  }

  return { write, flush, close }
}

export async function readStreamEvents(
  streamId: string,
  afterEventId: number
): Promise<StreamEventEntry[]> {
  const redis = getRedisClient()
  if (!redis) return []
  try {
    const raw = await redis.zrangebyscore(getEventsKey(streamId), afterEventId + 1, '+inf')
    return raw
      .map((entry) => {
        try {
          return JSON.parse(entry) as StreamEventEntry
        } catch {
          return null
        }
      })
      .filter((entry): entry is StreamEventEntry => Boolean(entry))
  } catch (error) {
    logger.warn('Failed to read stream events', {
      streamId,
      error: error instanceof Error ? error.message : String(error),
    })
    return []
  }
}
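For orientation, a minimal usage sketch of the buffer API above, mirroring the tests earlier in this diff (illustrative only, not part of the change; the stream id is a placeholder and the calls are assumed to run inside an async function):

// Append one event, then replay everything recorded after event id 0.
await appendStreamEvent('stream-123', { type: 'content', data: 'hello' })
const replayed = await readStreamEvents('stream-123', 0)

// Or batch writes through the writer, which reserves ids via INCRBY and flushes with a pipeline.
const writer = createStreamEventWriter('stream-123')
await writer.write({ type: 'content', data: 'chunk' })
await writer.close() // close() flushes any pending events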
180
apps/sim/lib/copilot/orchestrator/stream-core.ts
Normal file
@@ -0,0 +1,180 @@
import { createLogger } from '@sim/logger'
import { ORCHESTRATION_TIMEOUT_MS } from '@/lib/copilot/constants'
import {
  handleSubagentRouting,
  sseHandlers,
  subAgentHandlers,
} from '@/lib/copilot/orchestrator/sse-handlers'
import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser'
import {
  normalizeSseEvent,
  shouldSkipToolCallEvent,
  shouldSkipToolResultEvent,
} from '@/lib/copilot/orchestrator/sse-utils'
import type {
  ExecutionContext,
  OrchestratorOptions,
  SSEEvent,
  StreamingContext,
  ToolCallSummary,
} from '@/lib/copilot/orchestrator/types'

const logger = createLogger('CopilotStreamCore')

/**
 * Options for the shared stream processing loop.
 */
export interface StreamLoopOptions extends OrchestratorOptions {
  /**
   * Called for each normalized event BEFORE standard handler dispatch.
   * Return true to skip the default handler for this event.
   */
  onBeforeDispatch?: (event: SSEEvent, context: StreamingContext) => boolean | void
}

/**
 * Create a fresh StreamingContext.
 */
export function createStreamingContext(overrides?: Partial<StreamingContext>): StreamingContext {
  return {
    chatId: undefined,
    conversationId: undefined,
    messageId: crypto.randomUUID(),
    accumulatedContent: '',
    contentBlocks: [],
    toolCalls: new Map(),
    currentThinkingBlock: null,
    isInThinkingBlock: false,
    subAgentParentToolCallId: undefined,
    subAgentContent: {},
    subAgentToolCalls: {},
    pendingContent: '',
    streamComplete: false,
    wasAborted: false,
    errors: [],
    ...overrides,
  }
}

/**
 * Run the SSE stream processing loop.
 *
 * Handles: fetch -> parse -> normalize -> dedupe -> subagent routing -> handler dispatch.
 * Callers provide the fetch URL/options and can intercept events via onBeforeDispatch.
 */
export async function runStreamLoop(
  fetchUrl: string,
  fetchOptions: RequestInit,
  context: StreamingContext,
  execContext: ExecutionContext,
  options: StreamLoopOptions
): Promise<void> {
  const { timeout = ORCHESTRATION_TIMEOUT_MS, abortSignal } = options

  const response = await fetch(fetchUrl, {
    ...fetchOptions,
    signal: abortSignal,
  })

  if (!response.ok) {
    const errorText = await response.text().catch(() => '')
    throw new Error(`Copilot backend error (${response.status}): ${errorText || response.statusText}`)
  }

  if (!response.body) {
    throw new Error('Copilot backend response missing body')
  }

  const reader = response.body.getReader()
  const decoder = new TextDecoder()

  const timeoutId = setTimeout(() => {
    context.errors.push('Request timed out')
    context.streamComplete = true
    reader.cancel().catch(() => {})
  }, timeout)

  try {
    for await (const event of parseSSEStream(reader, decoder, abortSignal)) {
      if (abortSignal?.aborted) {
        context.wasAborted = true
        break
      }

      const normalizedEvent = normalizeSseEvent(event)

      // Skip duplicate tool events.
      const shouldSkipToolCall = shouldSkipToolCallEvent(normalizedEvent)
      const shouldSkipToolResult = shouldSkipToolResultEvent(normalizedEvent)

      if (!shouldSkipToolCall && !shouldSkipToolResult) {
        try {
          await options.onEvent?.(normalizedEvent)
        } catch (error) {
          logger.warn('Failed to forward SSE event', {
            type: normalizedEvent.type,
            error: error instanceof Error ? error.message : String(error),
          })
        }
      }

      // Let the caller intercept before standard dispatch.
      if (options.onBeforeDispatch?.(normalizedEvent, context)) {
        if (context.streamComplete) break
        continue
      }

      // Standard subagent start/end handling.
      if (normalizedEvent.type === 'subagent_start') {
        const eventData = normalizedEvent.data as Record<string, unknown> | undefined
        const toolCallId = eventData?.tool_call_id as string | undefined
        if (toolCallId) {
          context.subAgentParentToolCallId = toolCallId
          context.subAgentContent[toolCallId] = ''
          context.subAgentToolCalls[toolCallId] = []
        }
        continue
      }

      if (normalizedEvent.type === 'subagent_end') {
        context.subAgentParentToolCallId = undefined
        continue
      }

      // Subagent event routing.
      if (handleSubagentRouting(normalizedEvent, context)) {
        const handler = subAgentHandlers[normalizedEvent.type]
        if (handler) {
          await handler(normalizedEvent, context, execContext, options)
        }
        if (context.streamComplete) break
        continue
      }

      // Main event handler dispatch.
      const handler = sseHandlers[normalizedEvent.type]
      if (handler) {
        await handler(normalizedEvent, context, execContext, options)
      }
      if (context.streamComplete) break
    }
  } finally {
    clearTimeout(timeoutId)
  }
}

/**
 * Build a ToolCallSummary array from the streaming context.
 */
export function buildToolCallSummaries(context: StreamingContext): ToolCallSummary[] {
  return Array.from(context.toolCalls.values()).map((toolCall) => ({
    id: toolCall.id,
    name: toolCall.name,
    status: toolCall.status,
    params: toolCall.params,
    result: toolCall.result?.output,
    error: toolCall.error,
    durationMs:
      toolCall.endTime && toolCall.startTime ? toolCall.endTime - toolCall.startTime : undefined,
  }))
}
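As a rough sketch of how a caller drives this loop (illustrative, not part of the diff; the endpoint, payload, event type, and execContext are placeholders, and the subagent orchestrator in the next file is the real in-tree example):

const context = createStreamingContext()
await runStreamLoop(
  'https://agent.example.invalid/api/chat', // placeholder endpoint
  { method: 'POST', body: JSON.stringify({ stream: true }) },
  context,
  execContext, // an ExecutionContext prepared by the caller
  {
    onBeforeDispatch: (event, ctx) => {
      if (event.type === 'done') { // placeholder event type
        ctx.streamComplete = true
        return true // returning true skips the default handler for this event
      }
    },
  }
)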
137
apps/sim/lib/copilot/orchestrator/subagent.ts
Normal file
@@ -0,0 +1,137 @@
import { createLogger } from '@sim/logger'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
import type {
  ExecutionContext,
  OrchestratorOptions,
  SSEEvent,
  StreamingContext,
  ToolCallSummary,
} from '@/lib/copilot/orchestrator/types'
import { env } from '@/lib/core/config/env'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'

const logger = createLogger('CopilotSubagentOrchestrator')

export interface SubagentOrchestratorOptions extends Omit<OrchestratorOptions, 'onComplete'> {
  userId: string
  workflowId?: string
  workspaceId?: string
  onComplete?: (result: SubagentOrchestratorResult) => void | Promise<void>
}

export interface SubagentOrchestratorResult {
  success: boolean
  content: string
  toolCalls: ToolCallSummary[]
  structuredResult?: {
    type?: string
    summary?: string
    data?: unknown
    success?: boolean
  }
  error?: string
  errors?: string[]
}

export async function orchestrateSubagentStream(
  agentId: string,
  requestPayload: Record<string, unknown>,
  options: SubagentOrchestratorOptions
): Promise<SubagentOrchestratorResult> {
  const { userId, workflowId, workspaceId } = options
  const execContext = await buildExecutionContext(userId, workflowId, workspaceId)

  const msgId = requestPayload?.messageId
  const context = createStreamingContext({
    messageId: typeof msgId === 'string' ? msgId : crypto.randomUUID(),
  })

  let structuredResult: SubagentOrchestratorResult['structuredResult']

  try {
    await runStreamLoop(
      `${SIM_AGENT_API_URL}/api/subagent/${agentId}`,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
        },
        body: JSON.stringify({ ...requestPayload, userId, stream: true }),
      },
      context,
      execContext,
      {
        ...options,
        onBeforeDispatch: (event: SSEEvent, ctx: StreamingContext) => {
          // Handle structured_result / subagent_result - subagent-specific.
          if (event.type === 'structured_result' || event.type === 'subagent_result') {
            structuredResult = normalizeStructuredResult(event.data)
            ctx.streamComplete = true
            return true // skip default dispatch
          }

          // For direct subagent calls, events may have the subagent field set
          // but no subagent_start because this IS the top-level agent.
          // Skip subagent routing for events where the subagent field matches
          // the current agentId - these are top-level events.
          if (event.subagent === agentId && !ctx.subAgentParentToolCallId) {
            return false // let default dispatch handle it
          }

          return false // let default dispatch handle it
        },
      }
    )

    const result: SubagentOrchestratorResult = {
      success: context.errors.length === 0 && !context.wasAborted,
      content: context.accumulatedContent,
      toolCalls: buildToolCallSummaries(context),
      structuredResult,
      errors: context.errors.length ? context.errors : undefined,
    }
    await options.onComplete?.(result)
    return result
  } catch (error) {
    const err = error instanceof Error ? error : new Error('Subagent orchestration failed')
    logger.error('Subagent orchestration failed', { error: err.message, agentId })
    await options.onError?.(err)
    return {
      success: false,
      content: context.accumulatedContent,
      toolCalls: [],
      error: err.message,
    }
  }
}

function normalizeStructuredResult(data: unknown): SubagentOrchestratorResult['structuredResult'] {
  if (!data || typeof data !== 'object') return undefined
  const d = data as Record<string, unknown>
  return {
    type: (d.result_type || d.type) as string | undefined,
    summary: d.summary as string | undefined,
    data: d.data ?? d,
    success: d.success as boolean | undefined,
  }
}

async function buildExecutionContext(
  userId: string,
  workflowId?: string,
  workspaceId?: string
): Promise<ExecutionContext> {
  if (workflowId) {
    return prepareExecutionContext(userId, workflowId)
  }
  const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
  return {
    userId,
    workflowId: workflowId || '',
    workspaceId,
    decryptedEnvVars,
  }
}
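A hedged usage sketch for the subagent orchestrator above (not part of the diff; the agent id, payload, and user id are illustrative):

const result = await orchestrateSubagentStream(
  'docs-agent', // illustrative agent id
  { messageId: crypto.randomUUID(), message: 'Summarize this workflow' },
  {
    userId: 'user_123', // illustrative
    onComplete: async (r) => {
      console.log(r.success, r.structuredResult?.summary)
    },
  }
)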
129
apps/sim/lib/copilot/orchestrator/tool-executor/access.ts
Normal file
@@ -0,0 +1,129 @@
import { db } from '@sim/db'
import { permissions, workflow, workspace } from '@sim/db/schema'
import { and, asc, desc, eq, inArray, or } from 'drizzle-orm'

type WorkflowRecord = typeof workflow.$inferSelect

export async function ensureWorkflowAccess(
  workflowId: string,
  userId: string
): Promise<{
  workflow: WorkflowRecord
  workspaceId?: string | null
}> {
  const [workflowRecord] = await db
    .select()
    .from(workflow)
    .where(eq(workflow.id, workflowId))
    .limit(1)
  if (!workflowRecord) {
    throw new Error(`Workflow ${workflowId} not found`)
  }

  if (workflowRecord.userId === userId) {
    return { workflow: workflowRecord, workspaceId: workflowRecord.workspaceId }
  }

  if (workflowRecord.workspaceId) {
    const [permissionRow] = await db
      .select({ permissionType: permissions.permissionType })
      .from(permissions)
      .where(
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workflowRecord.workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .limit(1)
    if (permissionRow) {
      return { workflow: workflowRecord, workspaceId: workflowRecord.workspaceId }
    }
  }

  throw new Error('Unauthorized workflow access')
}

export async function getDefaultWorkspaceId(userId: string): Promise<string> {
  const workspaces = await db
    .select({ workspaceId: workspace.id })
    .from(permissions)
    .innerJoin(workspace, eq(permissions.entityId, workspace.id))
    .where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
    .orderBy(desc(workspace.createdAt))
    .limit(1)

  const workspaceId = workspaces[0]?.workspaceId
  if (!workspaceId) {
    throw new Error('No workspace found for user')
  }

  return workspaceId
}

export async function ensureWorkspaceAccess(
  workspaceId: string,
  userId: string,
  requireWrite: boolean
): Promise<void> {
  const [row] = await db
    .select({
      permissionType: permissions.permissionType,
      ownerId: workspace.ownerId,
    })
    .from(permissions)
    .innerJoin(workspace, eq(permissions.entityId, workspace.id))
    .where(
      and(
        eq(permissions.entityType, 'workspace'),
        eq(permissions.entityId, workspaceId),
        eq(permissions.userId, userId)
      )
    )
    .limit(1)

  if (!row) {
    throw new Error(`Workspace ${workspaceId} not found`)
  }

  const isOwner = row.ownerId === userId
  const permissionType = row.permissionType
  const canWrite = isOwner || permissionType === 'admin' || permissionType === 'write'

  if (requireWrite && !canWrite) {
    throw new Error('Write or admin access required for this workspace')
  }

  if (!requireWrite && !canWrite && permissionType !== 'read') {
    throw new Error('Access denied to workspace')
  }
}

export async function getAccessibleWorkflowsForUser(
  userId: string,
  options?: { workspaceId?: string; folderId?: string }
) {
  const workspaceIds = await db
    .select({ entityId: permissions.entityId })
    .from(permissions)
    .where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))

  const workspaceIdList = workspaceIds.map((row) => row.entityId)

  const workflowConditions = [eq(workflow.userId, userId)]
  if (workspaceIdList.length > 0) {
    workflowConditions.push(inArray(workflow.workspaceId, workspaceIdList))
  }
  if (options?.workspaceId) {
    workflowConditions.push(eq(workflow.workspaceId, options.workspaceId))
  }
  if (options?.folderId) {
    workflowConditions.push(eq(workflow.folderId, options.folderId))
  }

  return db
    .select()
    .from(workflow)
    .where(or(...workflowConditions))
    .orderBy(asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id))
}
@@ -0,0 +1,288 @@
import crypto from 'crypto'
import { db } from '@sim/db'
import { chat, workflowMcpTool } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { deployWorkflow, undeployWorkflow } from '@/lib/workflows/persistence/utils'
import { checkChatAccess, checkWorkflowAccessForChatCreation } from '@/app/api/chat/utils'
import { ensureWorkflowAccess } from '../access'
import type { DeployApiParams, DeployChatParams, DeployMcpParams } from '../param-types'

export async function executeDeployApi(
  params: DeployApiParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const action = params.action === 'undeploy' ? 'undeploy' : 'deploy'
    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)

    if (action === 'undeploy') {
      const result = await undeployWorkflow({ workflowId })
      if (!result.success) {
        return { success: false, error: result.error || 'Failed to undeploy workflow' }
      }
      return { success: true, output: { workflowId, isDeployed: false } }
    }

    const result = await deployWorkflow({
      workflowId,
      deployedBy: context.userId,
      workflowName: workflowRecord.name || undefined,
    })
    if (!result.success) {
      return { success: false, error: result.error || 'Failed to deploy workflow' }
    }

    return {
      success: true,
      output: {
        workflowId,
        isDeployed: true,
        deployedAt: result.deployedAt,
        version: result.version,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeDeployChat(
  params: DeployChatParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const action = params.action === 'undeploy' ? 'undeploy' : 'deploy'
    if (action === 'undeploy') {
      const existing = await db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1)
      if (!existing.length) {
        return { success: false, error: 'No active chat deployment found for this workflow' }
      }
      const { hasAccess } = await checkChatAccess(existing[0].id, context.userId)
      if (!hasAccess) {
        return { success: false, error: 'Unauthorized chat access' }
      }
      await db.delete(chat).where(eq(chat.id, existing[0].id))
      return { success: true, output: { success: true, action: 'undeploy', isDeployed: false } }
    }

    const { hasAccess } = await checkWorkflowAccessForChatCreation(workflowId, context.userId)
    if (!hasAccess) {
      return { success: false, error: 'Workflow not found or access denied' }
    }

    const existing = await db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1)
    const existingDeployment = existing[0] || null

    const identifier = String(params.identifier || existingDeployment?.identifier || '').trim()
    const title = String(params.title || existingDeployment?.title || '').trim()
    if (!identifier || !title) {
      return { success: false, error: 'Chat identifier and title are required' }
    }

    const identifierPattern = /^[a-z0-9-]+$/
    if (!identifierPattern.test(identifier)) {
      return {
        success: false,
        error: 'Identifier can only contain lowercase letters, numbers, and hyphens',
      }
    }

    const existingIdentifier = await db
      .select()
      .from(chat)
      .where(eq(chat.identifier, identifier))
      .limit(1)
    if (existingIdentifier.length > 0 && existingIdentifier[0].id !== existingDeployment?.id) {
      return { success: false, error: 'Identifier already in use' }
    }

    const deployResult = await deployWorkflow({
      workflowId,
      deployedBy: context.userId,
    })
    if (!deployResult.success) {
      return { success: false, error: deployResult.error || 'Failed to deploy workflow' }
    }

    const existingCustomizations =
      (existingDeployment?.customizations as
        | { primaryColor?: string; welcomeMessage?: string }
        | undefined) || {}

    const payload = {
      workflowId,
      identifier,
      title,
      description: String(params.description || existingDeployment?.description || ''),
      customizations: {
        primaryColor:
          params.customizations?.primaryColor ||
          existingCustomizations.primaryColor ||
          'var(--brand-primary-hover-hex)',
        welcomeMessage:
          params.customizations?.welcomeMessage ||
          existingCustomizations.welcomeMessage ||
          'Hi there! How can I help you today?',
      },
      authType: params.authType || existingDeployment?.authType || 'public',
      password: params.password,
      allowedEmails: params.allowedEmails || existingDeployment?.allowedEmails || [],
      outputConfigs: params.outputConfigs || existingDeployment?.outputConfigs || [],
    }

    if (existingDeployment) {
      await db
        .update(chat)
        .set({
          identifier: payload.identifier,
          title: payload.title,
          description: payload.description,
          customizations: payload.customizations,
          authType: payload.authType,
          password: payload.password || existingDeployment.password,
          allowedEmails:
            payload.authType === 'email' || payload.authType === 'sso' ? payload.allowedEmails : [],
          outputConfigs: payload.outputConfigs,
          updatedAt: new Date(),
        })
        .where(eq(chat.id, existingDeployment.id))
    } else {
      await db.insert(chat).values({
        id: crypto.randomUUID(),
        workflowId,
        userId: context.userId,
        identifier: payload.identifier,
        title: payload.title,
        description: payload.description,
        customizations: payload.customizations,
        isActive: true,
        authType: payload.authType,
        password: payload.password || null,
        allowedEmails:
          payload.authType === 'email' || payload.authType === 'sso' ? payload.allowedEmails : [],
        outputConfigs: payload.outputConfigs,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
    }

    return {
      success: true,
      output: { success: true, action: 'deploy', isDeployed: true, identifier },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeDeployMcp(
  params: DeployMcpParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    const workspaceId = workflowRecord.workspaceId
    if (!workspaceId) {
      return { success: false, error: 'workspaceId is required' }
    }

    if (!workflowRecord.isDeployed) {
      return {
        success: false,
        error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
      }
    }

    const serverId = params.serverId
    if (!serverId) {
      return {
        success: false,
        error: 'serverId is required. Use list_workspace_mcp_servers to get available servers.',
      }
    }

    const existingTool = await db
      .select()
      .from(workflowMcpTool)
      .where(
        and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.workflowId, workflowId))
      )
      .limit(1)

    const toolName = sanitizeToolName(
      params.toolName || workflowRecord.name || `workflow_${workflowId}`
    )
    const toolDescription =
      params.toolDescription ||
      workflowRecord.description ||
      `Execute ${workflowRecord.name} workflow`
    const parameterSchema = params.parameterSchema || {}

    if (existingTool.length > 0) {
      const toolId = existingTool[0].id
      await db
        .update(workflowMcpTool)
        .set({
          toolName,
          toolDescription,
          parameterSchema,
          updatedAt: new Date(),
        })
        .where(eq(workflowMcpTool.id, toolId))
      return { success: true, output: { toolId, toolName, toolDescription, updated: true } }
    }

    const toolId = crypto.randomUUID()
    await db.insert(workflowMcpTool).values({
      id: toolId,
      serverId,
      workflowId,
      toolName,
      toolDescription,
      parameterSchema,
      createdAt: new Date(),
      updatedAt: new Date(),
    })

    return { success: true, output: { toolId, toolName, toolDescription, updated: false } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeRedeploy(context: ExecutionContext): Promise<ToolCallResult> {
  try {
    const workflowId = context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    await ensureWorkflowAccess(workflowId, context.userId)

    const result = await deployWorkflow({ workflowId, deployedBy: context.userId })
    if (!result.success) {
      return { success: false, error: result.error || 'Failed to redeploy workflow' }
    }
    return {
      success: true,
      output: { workflowId, deployedAt: result.deployedAt || null, version: result.version },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
@@ -0,0 +1,2 @@
export * from './deploy'
export * from './manage'
@@ -0,0 +1,226 @@
import crypto from 'crypto'
import { db } from '@sim/db'
import { chat, workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { eq, inArray } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
import { ensureWorkflowAccess } from '../access'
import type {
  CheckDeploymentStatusParams,
  CreateWorkspaceMcpServerParams,
  ListWorkspaceMcpServersParams,
} from '../param-types'

export async function executeCheckDeploymentStatus(
  params: CheckDeploymentStatusParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    const workspaceId = workflowRecord.workspaceId

    const [apiDeploy, chatDeploy] = await Promise.all([
      db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1),
      db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1),
    ])

    const isApiDeployed = apiDeploy[0]?.isDeployed || false
    const apiDetails = {
      isDeployed: isApiDeployed,
      deployedAt: apiDeploy[0]?.deployedAt || null,
      endpoint: isApiDeployed ? `/api/workflows/${workflowId}/execute` : null,
      apiKey: workflowRecord.workspaceId ? 'Workspace API keys' : 'Personal API keys',
      needsRedeployment: false,
    }

    const isChatDeployed = !!chatDeploy[0]
    const chatCustomizations =
      (chatDeploy[0]?.customizations as
        | { welcomeMessage?: string; primaryColor?: string }
        | undefined) || {}
    const chatDetails = {
      isDeployed: isChatDeployed,
      chatId: chatDeploy[0]?.id || null,
      identifier: chatDeploy[0]?.identifier || null,
      chatUrl: isChatDeployed ? `/chat/${chatDeploy[0]?.identifier}` : null,
      title: chatDeploy[0]?.title || null,
      description: chatDeploy[0]?.description || null,
      authType: chatDeploy[0]?.authType || null,
      allowedEmails: chatDeploy[0]?.allowedEmails || null,
      outputConfigs: chatDeploy[0]?.outputConfigs || null,
      welcomeMessage: chatCustomizations.welcomeMessage || null,
      primaryColor: chatCustomizations.primaryColor || null,
      hasPassword: Boolean(chatDeploy[0]?.password),
    }

    const mcpDetails: {
      isDeployed: boolean
      servers: Array<{
        serverId: string
        serverName: string
        toolName: string
        toolDescription: string | null
        parameterSchema: unknown
        toolId: string
      }>
    } = { isDeployed: false, servers: [] }
    if (workspaceId) {
      const servers = await db
        .select({
          serverId: workflowMcpServer.id,
          serverName: workflowMcpServer.name,
          toolName: workflowMcpTool.toolName,
          toolDescription: workflowMcpTool.toolDescription,
          parameterSchema: workflowMcpTool.parameterSchema,
          toolId: workflowMcpTool.id,
        })
        .from(workflowMcpTool)
        .innerJoin(workflowMcpServer, eq(workflowMcpTool.serverId, workflowMcpServer.id))
        .where(eq(workflowMcpTool.workflowId, workflowId))

      if (servers.length > 0) {
        mcpDetails.isDeployed = true
        mcpDetails.servers = servers
      }
    }

    const isDeployed = apiDetails.isDeployed || chatDetails.isDeployed || mcpDetails.isDeployed
    return {
      success: true,
      output: { isDeployed, api: apiDetails, chat: chatDetails, mcp: mcpDetails },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListWorkspaceMcpServers(
  params: ListWorkspaceMcpServersParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    const workspaceId = workflowRecord.workspaceId
    if (!workspaceId) {
      return { success: false, error: 'workspaceId is required' }
    }

    const servers = await db
      .select({
        id: workflowMcpServer.id,
        name: workflowMcpServer.name,
        description: workflowMcpServer.description,
      })
      .from(workflowMcpServer)
      .where(eq(workflowMcpServer.workspaceId, workspaceId))

    const serverIds = servers.map((server) => server.id)
    const tools =
      serverIds.length > 0
        ? await db
            .select({
              serverId: workflowMcpTool.serverId,
              toolName: workflowMcpTool.toolName,
            })
            .from(workflowMcpTool)
            .where(inArray(workflowMcpTool.serverId, serverIds))
        : []

    const toolNamesByServer: Record<string, string[]> = {}
    for (const tool of tools) {
      if (!toolNamesByServer[tool.serverId]) {
        toolNamesByServer[tool.serverId] = []
      }
      toolNamesByServer[tool.serverId].push(tool.toolName)
    }

    const serversWithToolNames = servers.map((server) => ({
      ...server,
      toolCount: toolNamesByServer[server.id]?.length || 0,
      toolNames: toolNamesByServer[server.id] || [],
    }))

    return { success: true, output: { servers: serversWithToolNames, count: servers.length } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeCreateWorkspaceMcpServer(
  params: CreateWorkspaceMcpServerParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    const workspaceId = workflowRecord.workspaceId
    if (!workspaceId) {
      return { success: false, error: 'workspaceId is required' }
    }

    const name = params.name?.trim()
    if (!name) {
      return { success: false, error: 'name is required' }
    }

    const serverId = crypto.randomUUID()
    const [server] = await db
      .insert(workflowMcpServer)
      .values({
        id: serverId,
        workspaceId,
        createdBy: context.userId,
        name,
        description: params.description?.trim() || null,
        isPublic: params.isPublic ?? false,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      .returning()

    const workflowIds: string[] = params.workflowIds || []
    const addedTools: Array<{ workflowId: string; toolName: string }> = []

    if (workflowIds.length > 0) {
      const workflows = await db.select().from(workflow).where(inArray(workflow.id, workflowIds))

      for (const wf of workflows) {
        if (wf.workspaceId !== workspaceId || !wf.isDeployed) {
          continue
        }
        const hasStartBlock = await hasValidStartBlock(wf.id)
        if (!hasStartBlock) {
          continue
        }
        const toolName = sanitizeToolName(wf.name || `workflow_${wf.id}`)
        await db.insert(workflowMcpTool).values({
          id: crypto.randomUUID(),
          serverId,
          workflowId: wf.id,
          toolName,
          toolDescription: wf.description || `Execute ${wf.name} workflow`,
          parameterSchema: {},
          createdAt: new Date(),
          updatedAt: new Date(),
        })
        addedTools.push({ workflowId: wf.id, toolName })
      }
    }

    return { success: true, output: { server, addedTools } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
248
apps/sim/lib/copilot/orchestrator/tool-executor/index.ts
Normal file
@@ -0,0 +1,248 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type {
  ExecutionContext,
  ToolCallResult,
  ToolCallState,
} from '@/lib/copilot/orchestrator/types'
import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { getTool, resolveToolId } from '@/tools/utils'
import {
  executeCheckDeploymentStatus,
  executeCreateWorkspaceMcpServer,
  executeDeployApi,
  executeDeployChat,
  executeDeployMcp,
  executeListWorkspaceMcpServers,
  executeRedeploy,
} from './deployment-tools'
import { executeIntegrationToolDirect } from './integration-tools'
import type {
  CheckDeploymentStatusParams,
  CreateFolderParams,
  CreateWorkflowParams,
  CreateWorkspaceMcpServerParams,
  DeployApiParams,
  DeployChatParams,
  DeployMcpParams,
  GetBlockOutputsParams,
  GetBlockUpstreamReferencesParams,
  GetUserWorkflowParams,
  GetWorkflowDataParams,
  GetWorkflowFromNameParams,
  ListFoldersParams,
  ListUserWorkflowsParams,
  ListWorkspaceMcpServersParams,
  MoveFolderParams,
  MoveWorkflowParams,
  RenameWorkflowParams,
  RunWorkflowParams,
  SetGlobalWorkflowVariablesParams,
} from './param-types'
import {
  executeCreateFolder,
  executeCreateWorkflow,
  executeGetBlockOutputs,
  executeGetBlockUpstreamReferences,
  executeGetUserWorkflow,
  executeGetWorkflowData,
  executeGetWorkflowFromName,
  executeListFolders,
  executeListUserWorkflows,
  executeListUserWorkspaces,
  executeMoveFolder,
  executeMoveWorkflow,
  executeRenameWorkflow,
  executeRunWorkflow,
  executeSetGlobalWorkflowVariables,
} from './workflow-tools'

const logger = createLogger('CopilotToolExecutor')

const SERVER_TOOLS = new Set<string>([
  'get_blocks_and_tools',
  'get_blocks_metadata',
  'get_block_options',
  'get_block_config',
  'get_trigger_blocks',
  'edit_workflow',
  'get_workflow_console',
  'search_documentation',
  'search_online',
  'set_environment_variables',
  'get_credentials',
  'make_api_request',
  'knowledge_base',
])

const SIM_WORKFLOW_TOOL_HANDLERS: Record<
  string,
  (params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
> = {
  get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
  get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
  list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
  list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
  list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
  create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
  create_folder: (p, c) => executeCreateFolder(p as CreateFolderParams, c),
  rename_workflow: (p, c) => executeRenameWorkflow(p as unknown as RenameWorkflowParams, c),
  move_workflow: (p, c) => executeMoveWorkflow(p as unknown as MoveWorkflowParams, c),
  move_folder: (p, c) => executeMoveFolder(p as unknown as MoveFolderParams, c),
  get_workflow_data: (p, c) => executeGetWorkflowData(p as GetWorkflowDataParams, c),
  get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
  get_block_upstream_references: (p, c) =>
    executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
  run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
  set_global_workflow_variables: (p, c) =>
    executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
  deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
  deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
  deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
  redeploy: (_p, c) => executeRedeploy(c),
  check_deployment_status: (p, c) => executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
  list_workspace_mcp_servers: (p, c) =>
    executeListWorkspaceMcpServers(p as ListWorkspaceMcpServersParams, c),
  create_workspace_mcp_server: (p, c) =>
    executeCreateWorkspaceMcpServer(p as CreateWorkspaceMcpServerParams, c),
}

/**
 * Execute a tool server-side without calling internal routes.
 */
export async function executeToolServerSide(
  toolCall: ToolCallState,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const toolName = toolCall.name
  const resolvedToolName = resolveToolId(toolName)

  if (SERVER_TOOLS.has(toolName)) {
    return executeServerToolDirect(toolName, toolCall.params || {}, context)
  }

  if (toolName in SIM_WORKFLOW_TOOL_HANDLERS) {
    return executeSimWorkflowTool(toolName, toolCall.params || {}, context)
  }

  const toolConfig = getTool(resolvedToolName)
  if (!toolConfig) {
    logger.warn('Tool not found in registry', { toolName, resolvedToolName })
    return {
      success: false,
      error: `Tool not found: ${toolName}`,
    }
  }

  return executeIntegrationToolDirect(toolCall, toolConfig, context)
}

/**
 * Execute a server tool directly via the server tool router.
 */
async function executeServerToolDirect(
  toolName: string,
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    // Inject workflowId from context if not provided in params
    // This is needed for tools like set_environment_variables that require workflowId
    const enrichedParams = { ...params }
    if (!enrichedParams.workflowId && context.workflowId) {
      enrichedParams.workflowId = context.workflowId
    }

    const result = await routeExecution(toolName, enrichedParams, { userId: context.userId })
    return { success: true, output: result }
  } catch (error) {
    logger.error('Server tool execution failed', {
      toolName,
      error: error instanceof Error ? error.message : String(error),
    })
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Server tool execution failed',
    }
  }
}

async function executeSimWorkflowTool(
  toolName: string,
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const handler = SIM_WORKFLOW_TOOL_HANDLERS[toolName]
  if (!handler) return { success: false, error: `Unsupported workflow tool: ${toolName}` }
  return handler(params, context)
}

/**
 * Notify the copilot backend that a tool has completed.
 */
export async function markToolComplete(
  toolCallId: string,
  toolName: string,
  status: number,
  message?: unknown,
  data?: unknown
): Promise<boolean> {
  try {
    const response = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
      },
      body: JSON.stringify({
        id: toolCallId,
        name: toolName,
        status,
        message,
        data,
      }),
    })

    if (!response.ok) {
      logger.warn('Mark-complete call failed', { toolCallId, status: response.status })
      return false
    }

    return true
  } catch (error) {
    logger.error('Mark-complete call failed', {
      toolCallId,
      error: error instanceof Error ? error.message : String(error),
    })
    return false
  }
}

/**
 * Prepare execution context with cached environment values.
 */
export async function prepareExecutionContext(
  userId: string,
  workflowId: string
): Promise<ExecutionContext> {
  const workflowResult = await db
    .select({ workspaceId: workflow.workspaceId })
    .from(workflow)
    .where(eq(workflow.id, workflowId))
    .limit(1)
  const workspaceId = workflowResult[0]?.workspaceId ?? undefined
|
||||||
|
|
||||||
|
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||||
|
|
||||||
|
return {
|
||||||
|
userId,
|
||||||
|
workflowId,
|
||||||
|
workspaceId,
|
||||||
|
decryptedEnvVars,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,105 @@
import { db } from '@sim/db'
import { account, workflow } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type {
  ExecutionContext,
  ToolCallResult,
  ToolCallState,
} from '@/lib/copilot/orchestrator/types'
import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
import { executeTool } from '@/tools'
import { resolveToolId } from '@/tools/utils'

export async function executeIntegrationToolDirect(
  toolCall: ToolCallState,
  toolConfig: {
    oauth?: { required?: boolean; provider?: string }
    params?: { apiKey?: { required?: boolean } }
  },
  context: ExecutionContext
): Promise<ToolCallResult> {
  const { userId, workflowId } = context
  const toolName = resolveToolId(toolCall.name)
  const toolArgs = toolCall.params || {}

  let workspaceId = context.workspaceId
  if (!workspaceId && workflowId) {
    const workflowResult = await db
      .select({ workspaceId: workflow.workspaceId })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)
    workspaceId = workflowResult[0]?.workspaceId ?? undefined
  }

  const decryptedEnvVars =
    context.decryptedEnvVars || (await getEffectiveDecryptedEnv(userId, workspaceId))

  // Deep resolution walks nested objects to replace {{ENV_VAR}} references.
  // Safe because tool arguments originate from the LLM (not direct user input)
  // and env vars belong to the user themselves.
  const executionParams = resolveEnvVarReferences(toolArgs, decryptedEnvVars, {
    deep: true,
  }) as Record<string, unknown>

  if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
    const provider = toolConfig.oauth.provider
    const accounts = await db
      .select()
      .from(account)
      .where(and(eq(account.providerId, provider), eq(account.userId, userId)))
      .limit(1)

    if (!accounts.length) {
      return {
        success: false,
        error: `No ${provider} account connected. Please connect your account first.`,
      }
    }

    const acc = accounts[0]
    const requestId = generateRequestId()
    const { accessToken } = await refreshTokenIfNeeded(requestId, acc, acc.id)

    if (!accessToken) {
      return {
        success: false,
        error: `OAuth token not available for ${provider}. Please reconnect your account.`,
      }
    }

    executionParams.accessToken = accessToken
  }

  if (toolConfig.params?.apiKey?.required && !executionParams.apiKey) {
    return {
      success: false,
      error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
    }
  }

  executionParams._context = {
    workflowId,
    userId,
  }

  if (toolName === 'function_execute') {
    executionParams.envVars = decryptedEnvVars
    executionParams.workflowVariables = {}
    executionParams.blockData = {}
    executionParams.blockNameMapping = {}
    executionParams.language = executionParams.language || 'javascript'
    executionParams.timeout = executionParams.timeout || 30000
  }

  const result = await executeTool(toolName, executionParams)

  return {
    success: result.success,
    output: result.output,
    error: result.error,
  }
}
144 apps/sim/lib/copilot/orchestrator/tool-executor/param-types.ts Normal file
@@ -0,0 +1,144 @@
/**
 * Typed parameter interfaces for tool executor functions.
 * Replaces Record<string, any> with specific shapes based on actual property access.
 */

// === Workflow Query Params ===

export interface GetUserWorkflowParams {
  workflowId?: string
}

export interface GetWorkflowFromNameParams {
  workflow_name?: string
}

export interface ListUserWorkflowsParams {
  workspaceId?: string
  folderId?: string
}

export interface GetWorkflowDataParams {
  workflowId?: string
  data_type?: string
  dataType?: string
}

export interface GetBlockOutputsParams {
  workflowId?: string
  blockIds?: string[]
}

export interface GetBlockUpstreamReferencesParams {
  workflowId?: string
  blockIds: string[]
}

export interface ListFoldersParams {
  workspaceId?: string
}

// === Workflow Mutation Params ===

export interface CreateWorkflowParams {
  name?: string
  workspaceId?: string
  folderId?: string
  description?: string
}

export interface CreateFolderParams {
  name?: string
  workspaceId?: string
  parentId?: string
}

export interface RunWorkflowParams {
  workflowId?: string
  workflow_input?: unknown
  input?: unknown
}

export interface VariableOperation {
  name: string
  operation: 'add' | 'edit' | 'delete'
  value?: unknown
  type?: string
}

export interface SetGlobalWorkflowVariablesParams {
  workflowId?: string
  operations?: VariableOperation[]
}

// === Deployment Params ===

export interface DeployApiParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy'
}

export interface DeployChatParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy' | 'update'
  identifier?: string
  title?: string
  description?: string
  customizations?: {
    primaryColor?: string
    secondaryColor?: string
    welcomeMessage?: string
    iconUrl?: string
  }
  authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
  password?: string
  subdomain?: string
  allowedEmails?: string[]
  outputConfigs?: unknown[]
}

export interface DeployMcpParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy'
  toolName?: string
  toolDescription?: string
  serverId?: string
  parameterSchema?: Record<string, unknown>
}

export interface CheckDeploymentStatusParams {
  workflowId?: string
}

export interface ListWorkspaceMcpServersParams {
  workspaceId?: string
  workflowId?: string
}

export interface CreateWorkspaceMcpServerParams {
  workflowId?: string
  name?: string
  description?: string
  toolName?: string
  toolDescription?: string
  serverName?: string
  isPublic?: boolean
  workflowIds?: string[]
}

// === Workflow Organization Params ===

export interface RenameWorkflowParams {
  workflowId: string
  name: string
}

export interface MoveWorkflowParams {
  workflowId: string
  folderId: string | null
}

export interface MoveFolderParams {
  folderId: string
  parentId: string | null
}
@@ -0,0 +1,2 @@
export * from './mutations'
export * from './queries'
@@ -0,0 +1,370 @@
import crypto from 'crypto'
import { createLogger } from '@sim/logger'
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { and, eq, isNull, max } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { ensureWorkflowAccess, ensureWorkspaceAccess, getDefaultWorkspaceId } from '../access'
import type {
  CreateFolderParams,
  CreateWorkflowParams,
  MoveFolderParams,
  MoveWorkflowParams,
  RenameWorkflowParams,
  RunWorkflowParams,
  SetGlobalWorkflowVariablesParams,
  VariableOperation,
} from '../param-types'

const logger = createLogger('WorkflowMutations')

export async function executeCreateWorkflow(
  params: CreateWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const name = typeof params?.name === 'string' ? params.name.trim() : ''
    if (!name) {
      return { success: false, error: 'name is required' }
    }
    if (name.length > 200) {
      return { success: false, error: 'Workflow name must be 200 characters or less' }
    }
    const description = typeof params?.description === 'string' ? params.description : null
    if (description && description.length > 2000) {
      return { success: false, error: 'Description must be 2000 characters or less' }
    }

    const workspaceId = params?.workspaceId || (await getDefaultWorkspaceId(context.userId))
    const folderId = params?.folderId || null

    await ensureWorkspaceAccess(workspaceId, context.userId, true)

    const workflowId = crypto.randomUUID()
    const now = new Date()

    const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
    const [maxResult] = await db
      .select({ maxOrder: max(workflow.sortOrder) })
      .from(workflow)
      .where(and(eq(workflow.workspaceId, workspaceId), folderCondition))
    const sortOrder = (maxResult?.maxOrder ?? 0) + 1

    await db.insert(workflow).values({
      id: workflowId,
      userId: context.userId,
      workspaceId,
      folderId,
      sortOrder,
      name,
      description,
      color: '#3972F6',
      lastSynced: now,
      createdAt: now,
      updatedAt: now,
      isDeployed: false,
      runCount: 0,
      variables: {},
    })

    const { workflowState } = buildDefaultWorkflowArtifacts()
    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState)
    if (!saveResult.success) {
      throw new Error(saveResult.error || 'Failed to save workflow state')
    }

    return {
      success: true,
      output: {
        workflowId,
        workflowName: name,
        workspaceId,
        folderId,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeCreateFolder(
  params: CreateFolderParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const name = typeof params?.name === 'string' ? params.name.trim() : ''
    if (!name) {
      return { success: false, error: 'name is required' }
    }
    if (name.length > 200) {
      return { success: false, error: 'Folder name must be 200 characters or less' }
    }

    const workspaceId = params?.workspaceId || (await getDefaultWorkspaceId(context.userId))
    const parentId = params?.parentId || null

    await ensureWorkspaceAccess(workspaceId, context.userId, true)

    const [maxResult] = await db
      .select({ maxOrder: max(workflowFolder.sortOrder) })
      .from(workflowFolder)
      .where(
        and(
          eq(workflowFolder.workspaceId, workspaceId),
          parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
        )
      )
    const sortOrder = (maxResult?.maxOrder ?? 0) + 1

    const folderId = crypto.randomUUID()
    await db.insert(workflowFolder).values({
      id: folderId,
      userId: context.userId,
      workspaceId,
      parentId,
      name,
      sortOrder,
      createdAt: new Date(),
      updatedAt: new Date(),
    })

    return { success: true, output: { folderId, name, workspaceId, parentId } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeRunWorkflow(
  params: RunWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)

    const result = await executeWorkflow(
      {
        id: workflowRecord.id,
        userId: workflowRecord.userId,
        workspaceId: workflowRecord.workspaceId,
        variables: workflowRecord.variables || {},
      },
      generateRequestId(),
      params.workflow_input || params.input || undefined,
      context.userId,
      { enabled: true, useDraftState: true }
    )

    return {
      success: result.success,
      output: {
        executionId: result.metadata?.executionId,
        success: result.success,
        output: result.output,
        logs: result.logs,
      },
      error: result.success ? undefined : result.error || 'Workflow execution failed',
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeSetGlobalWorkflowVariables(
  params: SetGlobalWorkflowVariablesParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const operations: VariableOperation[] = Array.isArray(params.operations)
      ? params.operations
      : []
    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)

    interface WorkflowVariable {
      id: string
      workflowId?: string
      name: string
      type: string
      value?: unknown
    }
    const currentVarsRecord = (workflowRecord.variables as Record<string, unknown>) || {}
    const byName: Record<string, WorkflowVariable> = {}
    Object.values(currentVarsRecord).forEach((v) => {
      if (v && typeof v === 'object' && 'id' in v && 'name' in v) {
        const variable = v as WorkflowVariable
        byName[String(variable.name)] = variable
      }
    })

    for (const op of operations) {
      const key = String(op?.name || '')
      if (!key) continue
      const nextType = op?.type || byName[key]?.type || 'plain'
      const coerceValue = (value: unknown, type: string): unknown => {
        if (value === undefined) return value
        if (type === 'number') {
          const n = Number(value)
          return Number.isNaN(n) ? value : n
        }
        if (type === 'boolean') {
          const v = String(value).trim().toLowerCase()
          if (v === 'true') return true
          if (v === 'false') return false
          return value
        }
        if (type === 'array' || type === 'object') {
          try {
            const parsed = JSON.parse(String(value))
            if (type === 'array' && Array.isArray(parsed)) return parsed
            if (type === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed))
              return parsed
          } catch (error) {
            logger.warn('Failed to parse JSON value for variable coercion', {
              error: error instanceof Error ? error.message : String(error),
            })
          }
          return value
        }
        return value
      }

      if (op.operation === 'delete') {
        delete byName[key]
        continue
      }
      const typedValue = coerceValue(op.value, nextType)
      if (op.operation === 'add') {
        byName[key] = {
          id: crypto.randomUUID(),
          workflowId,
          name: key,
          type: nextType,
          value: typedValue,
        }
        continue
      }
      if (op.operation === 'edit') {
        if (!byName[key]) {
          byName[key] = {
            id: crypto.randomUUID(),
            workflowId,
            name: key,
            type: nextType,
            value: typedValue,
          }
        } else {
          byName[key] = {
            ...byName[key],
            type: nextType,
            value: typedValue,
          }
        }
      }
    }

    const nextVarsRecord = Object.fromEntries(
      Object.values(byName).map((v) => [String(v.id), v])
    )

    await db
      .update(workflow)
      .set({ variables: nextVarsRecord, updatedAt: new Date() })
      .where(eq(workflow.id, workflowId))

    return { success: true, output: { updated: Object.values(byName).length } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeRenameWorkflow(
  params: RenameWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    const name = typeof params.name === 'string' ? params.name.trim() : ''
    if (!name) {
      return { success: false, error: 'name is required' }
    }
    if (name.length > 200) {
      return { success: false, error: 'Workflow name must be 200 characters or less' }
    }

    await ensureWorkflowAccess(workflowId, context.userId)

    await db
      .update(workflow)
      .set({ name, updatedAt: new Date() })
      .where(eq(workflow.id, workflowId))

    return { success: true, output: { workflowId, name } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeMoveWorkflow(
  params: MoveWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    await ensureWorkflowAccess(workflowId, context.userId)

    const folderId = params.folderId || null

    await db
      .update(workflow)
      .set({ folderId, updatedAt: new Date() })
      .where(eq(workflow.id, workflowId))

    return { success: true, output: { workflowId, folderId } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeMoveFolder(
  params: MoveFolderParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const folderId = params.folderId
    if (!folderId) {
      return { success: false, error: 'folderId is required' }
    }

    const parentId = params.parentId || null

    if (parentId === folderId) {
      return { success: false, error: 'A folder cannot be moved into itself' }
    }

    await db
      .update(workflowFolder)
      .set({ parentId, updatedAt: new Date() })
      .where(eq(workflowFolder.id, folderId))

    return { success: true, output: { folderId, parentId } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
@@ -0,0 +1,564 @@
import { db } from '@sim/db'
import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import {
  formatNormalizedWorkflowForCopilot,
  normalizeWorkflowName,
} from '@/lib/copilot/tools/shared/workflow-utils'
import { mcpService } from '@/lib/mcp/service'
import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
import { getBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import { normalizeName } from '@/executor/constants'
import {
  ensureWorkflowAccess,
  ensureWorkspaceAccess,
  getAccessibleWorkflowsForUser,
  getDefaultWorkspaceId,
} from '../access'
import type {
  GetBlockOutputsParams,
  GetBlockUpstreamReferencesParams,
  GetUserWorkflowParams,
  GetWorkflowDataParams,
  GetWorkflowFromNameParams,
  ListFoldersParams,
  ListUserWorkflowsParams,
} from '../param-types'

export async function executeGetUserWorkflow(
  params: GetUserWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
      workflowId,
      context.userId
    )

    const normalized = await loadWorkflowFromNormalizedTables(workflowId)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    return {
      success: true,
      output: {
        workflowId,
        workflowName: workflowRecord.name || '',
        workspaceId,
        userWorkflow,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeGetWorkflowFromName(
  params: GetWorkflowFromNameParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowName =
      typeof params.workflow_name === 'string' ? params.workflow_name.trim() : ''
    if (!workflowName) {
      return { success: false, error: 'workflow_name is required' }
    }

    const workflows = await getAccessibleWorkflowsForUser(context.userId)

    const targetName = normalizeWorkflowName(workflowName)
    const match = workflows.find((w) => normalizeWorkflowName(w.name) === targetName)
    if (!match) {
      return { success: false, error: `Workflow not found: ${workflowName}` }
    }

    const normalized = await loadWorkflowFromNormalizedTables(match.id)
    const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
    if (!userWorkflow) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    return {
      success: true,
      output: {
        workflowId: match.id,
        workflowName: match.name || '',
        workspaceId: match.workspaceId,
        userWorkflow,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListUserWorkflows(
  params: ListUserWorkflowsParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workspaceId = params?.workspaceId as string | undefined
    const folderId = params?.folderId as string | undefined

    const workflows = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })

    const workflowList = workflows.map((w) => ({
      workflowId: w.id,
      workflowName: w.name || '',
      workspaceId: w.workspaceId,
      folderId: w.folderId,
    }))

    return { success: true, output: { workflows: workflowList } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListUserWorkspaces(
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workspaces = await db
      .select({
        workspaceId: workspace.id,
        workspaceName: workspace.name,
        ownerId: workspace.ownerId,
        permissionType: permissions.permissionType,
      })
      .from(permissions)
      .innerJoin(workspace, eq(permissions.entityId, workspace.id))
      .where(and(eq(permissions.userId, context.userId), eq(permissions.entityType, 'workspace')))
      .orderBy(desc(workspace.createdAt))

    const output = workspaces.map((row) => ({
      workspaceId: row.workspaceId,
      workspaceName: row.workspaceName,
      role: row.ownerId === context.userId ? 'owner' : row.permissionType,
    }))

    return { success: true, output: { workspaces: output } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeListFolders(
  params: ListFoldersParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workspaceId =
      (params?.workspaceId as string | undefined) || (await getDefaultWorkspaceId(context.userId))

    await ensureWorkspaceAccess(workspaceId, context.userId, false)

    const folders = await db
      .select({
        folderId: workflowFolder.id,
        folderName: workflowFolder.name,
        parentId: workflowFolder.parentId,
        sortOrder: workflowFolder.sortOrder,
      })
      .from(workflowFolder)
      .where(eq(workflowFolder.workspaceId, workspaceId))
      .orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))

    return {
      success: true,
      output: {
        workspaceId,
        folders,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeGetWorkflowData(
  params: GetWorkflowDataParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    const dataType = params.data_type || params.dataType || ''
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    if (!dataType) {
      return { success: false, error: 'data_type is required' }
    }

    const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
      workflowId,
      context.userId
    )

    if (dataType === 'global_variables') {
      const variablesRecord = (workflowRecord.variables as Record<string, unknown>) || {}
      const variables = Object.values(variablesRecord).map((v) => {
        const variable = v as Record<string, unknown> | null
        return {
          id: String(variable?.id || ''),
          name: String(variable?.name || ''),
          value: variable?.value,
        }
      })
      return { success: true, output: { variables } }
    }

    if (dataType === 'custom_tools') {
      if (!workspaceId) {
        return { success: false, error: 'workspaceId is required' }
      }
      const conditions = [
        eq(customTools.workspaceId, workspaceId),
        and(eq(customTools.userId, context.userId), isNull(customTools.workspaceId)),
      ]
      const toolsRows = await db
        .select()
        .from(customTools)
        .where(or(...conditions))
        .orderBy(desc(customTools.createdAt))

      const customToolsData = toolsRows.map((tool) => {
        const schema = tool.schema as Record<string, unknown> | null
        const fn = (schema?.function ?? {}) as Record<string, unknown>
        return {
          id: String(tool.id || ''),
          title: String(tool.title || ''),
          functionName: String(fn.name || ''),
          description: String(fn.description || ''),
          parameters: fn.parameters,
        }
      })

      return { success: true, output: { customTools: customToolsData } }
    }

    if (dataType === 'mcp_tools') {
      if (!workspaceId) {
        return { success: false, error: 'workspaceId is required' }
      }
      const tools = await mcpService.discoverTools(context.userId, workspaceId, false)
      const mcpTools = tools.map((tool) => ({
        name: String(tool.name || ''),
        serverId: String(tool.serverId || ''),
        serverName: String(tool.serverName || ''),
        description: String(tool.description || ''),
        inputSchema: tool.inputSchema,
      }))
      return { success: true, output: { mcpTools } }
    }

    if (dataType === 'files') {
      if (!workspaceId) {
        return { success: false, error: 'workspaceId is required' }
      }
      const files = await listWorkspaceFiles(workspaceId)
      const fileResults = files.map((file) => ({
        id: String(file.id || ''),
        name: String(file.name || ''),
        key: String(file.key || ''),
        path: String(file.path || ''),
        size: Number(file.size || 0),
        type: String(file.type || ''),
        uploadedAt: String(file.uploadedAt || ''),
      }))
      return { success: true, output: { files: fileResults } }
    }

    return { success: false, error: `Unknown data_type: ${dataType}` }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeGetBlockOutputs(
  params: GetBlockOutputsParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    await ensureWorkflowAccess(workflowId, context.userId)

    const normalized = await loadWorkflowFromNormalizedTables(workflowId)
    if (!normalized) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    const blocks = normalized.blocks || {}
    const loops = normalized.loops || {}
    const parallels = normalized.parallels || {}
    const blockIds =
      Array.isArray(params.blockIds) && params.blockIds.length > 0
        ? params.blockIds
        : Object.keys(blocks)

    const results: Array<{
      blockId: string
      blockName: string
      blockType: string
      outputs: string[]
      insideSubflowOutputs?: string[]
      outsideSubflowOutputs?: string[]
      triggerMode?: boolean
    }> = []

    for (const blockId of blockIds) {
      const block = blocks[blockId]
      if (!block?.type) continue
      const blockName = block.name || block.type

      if (block.type === 'loop' || block.type === 'parallel') {
        const insidePaths = getSubflowInsidePaths(block.type, blockId, loops, parallels)
        results.push({
          blockId,
          blockName,
          blockType: block.type,
          outputs: [],
          insideSubflowOutputs: formatOutputsWithPrefix(insidePaths, blockName),
          outsideSubflowOutputs: formatOutputsWithPrefix(['results'], blockName),
          triggerMode: block.triggerMode,
        })
        continue
      }

      const outputs = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
      results.push({
        blockId,
        blockName,
        blockType: block.type,
        outputs: formatOutputsWithPrefix(outputs, blockName),
        triggerMode: block.triggerMode,
      })
    }

    const variables = await getWorkflowVariablesForTool(workflowId)

    const payload = { blocks: results, variables }
    return { success: true, output: payload }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

export async function executeGetBlockUpstreamReferences(
  params: GetBlockUpstreamReferencesParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    if (!Array.isArray(params.blockIds) || params.blockIds.length === 0) {
      return { success: false, error: 'blockIds array is required' }
    }
    await ensureWorkflowAccess(workflowId, context.userId)

    const normalized = await loadWorkflowFromNormalizedTables(workflowId)
    if (!normalized) {
      return { success: false, error: 'Workflow has no normalized data' }
    }

    const blocks = normalized.blocks || {}
    const edges = normalized.edges || []
    const loops = normalized.loops || {}
    const parallels = normalized.parallels || {}

    const graphEdges = edges.map((edge) => ({ source: edge.source, target: edge.target }))
    const variableOutputs = await getWorkflowVariablesForTool(workflowId)

    interface AccessibleBlockEntry {
      blockId: string
      blockName: string
      blockType: string
      outputs: string[]
      triggerMode?: boolean
      accessContext?: 'inside' | 'outside'
    }

    interface UpstreamReferenceResult {
      blockId: string
      blockName: string
      blockType: string
      accessibleBlocks: AccessibleBlockEntry[]
      insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }>
      variables: Array<{ id: string; name: string; type: string; tag: string }>
    }

    const results: UpstreamReferenceResult[] = []

    for (const blockId of params.blockIds) {
      const targetBlock = blocks[blockId]
      if (!targetBlock) continue

      const insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }> = []
      const containingLoopIds = new Set<string>()
      const containingParallelIds = new Set<string>()

      Object.values(loops).forEach((loop) => {
        if (loop?.nodes?.includes(blockId)) {
          containingLoopIds.add(loop.id)
          const loopBlock = blocks[loop.id]
          if (loopBlock) {
            insideSubflows.push({
              blockId: loop.id,
              blockName: loopBlock.name || loopBlock.type,
              blockType: 'loop',
            })
          }
        }
      })

      Object.values(parallels).forEach((parallel) => {
        if (parallel?.nodes?.includes(blockId)) {
          containingParallelIds.add(parallel.id)
          const parallelBlock = blocks[parallel.id]
          if (parallelBlock) {
            insideSubflows.push({
              blockId: parallel.id,
              blockName: parallelBlock.name || parallelBlock.type,
              blockType: 'parallel',
            })
          }
        }
      })

      const ancestorIds = BlockPathCalculator.findAllPathNodes(graphEdges, blockId)
      const accessibleIds = new Set<string>(ancestorIds)
      accessibleIds.add(blockId)

      const starterBlock = Object.values(blocks).find((b) => isInputDefinitionTrigger(b.type))
      if (starterBlock && ancestorIds.includes(starterBlock.id)) {
        accessibleIds.add(starterBlock.id)
      }

      containingLoopIds.forEach((loopId) => {
        accessibleIds.add(loopId)
        loops[loopId]?.nodes?.forEach((nodeId: string) => accessibleIds.add(nodeId))
      })

      containingParallelIds.forEach((parallelId) => {
        accessibleIds.add(parallelId)
        parallels[parallelId]?.nodes?.forEach((nodeId: string) => accessibleIds.add(nodeId))
      })

      const accessibleBlocks: AccessibleBlockEntry[] = []

      for (const accessibleBlockId of accessibleIds) {
        const block = blocks[accessibleBlockId]
        if (!block?.type) continue
        const canSelfReference = block.type === 'approval' || block.type === 'human_in_the_loop'
        if (accessibleBlockId === blockId && !canSelfReference) continue

        const blockName = block.name || block.type
        let accessContext: 'inside' | 'outside' | undefined
        let outputPaths: string[]

        if (block.type === 'loop' || block.type === 'parallel') {
          const isInside =
            (block.type === 'loop' && containingLoopIds.has(accessibleBlockId)) ||
            (block.type === 'parallel' && containingParallelIds.has(accessibleBlockId))
          accessContext = isInside ? 'inside' : 'outside'
          outputPaths = isInside
            ? getSubflowInsidePaths(block.type, accessibleBlockId, loops, parallels)
            : ['results']
        } else {
          outputPaths = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
        }

        const formattedOutputs = formatOutputsWithPrefix(outputPaths, blockName)
        const entry: AccessibleBlockEntry = {
          blockId: accessibleBlockId,
          blockName,
          blockType: block.type,
          outputs: formattedOutputs,
          ...(block.triggerMode ? { triggerMode: true } : {}),
          ...(accessContext ? { accessContext } : {}),
        }
        accessibleBlocks.push(entry)
      }

      results.push({
        blockId,
        blockName: targetBlock.name || targetBlock.type,
        blockType: targetBlock.type,
        accessibleBlocks,
        insideSubflows,
        variables: variableOutputs,
      })
    }

    const payload = { results }
    return { success: true, output: payload }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}

async function getWorkflowVariablesForTool(
  workflowId: string
): Promise<Array<{ id: string; name: string; type: string; tag: string }>> {
  const [workflowRecord] = await db
    .select({ variables: workflow.variables })
    .from(workflow)
    .where(eq(workflow.id, workflowId))
    .limit(1)

  const variablesRecord = (workflowRecord?.variables as Record<string, unknown>) || {}
  return Object.values(variablesRecord)
    .filter((v): v is Record<string, unknown> => {
      if (!v || typeof v !== 'object') return false
      const variable = v as Record<string, unknown>
      return !!variable.name && String(variable.name).trim() !== ''
    })
    .map((v) => ({
      id: String(v.id || ''),
      name: String(v.name || ''),
      type: String(v.type || 'plain'),
      tag: `variable.${normalizeName(String(v.name || ''))}`,
    }))
}

function getSubflowInsidePaths(
  blockType: 'loop' | 'parallel',
  blockId: string,
  loops: Record<string, Loop>,
  parallels: Record<string, Parallel>
): string[] {
  const paths = ['index']
  if (blockType === 'loop') {
    const loopType = loops[blockId]?.loopType || 'for'
    if (loopType === 'forEach') {
      paths.push('currentItem', 'items')
    }
  } else {
    const parallelType = parallels[blockId]?.parallelType || 'count'
    if (parallelType === 'collection') {
      paths.push('currentItem', 'items')
    }
  }
  return paths
}

function formatOutputsWithPrefix(paths: string[], blockName: string): string[] {
  const normalizedName = normalizeName(blockName)
  return paths.map((path) => `${normalizedName}.${path}`)
}
150 apps/sim/lib/copilot/orchestrator/types.ts Normal file
@@ -0,0 +1,150 @@
import type { CopilotProviderConfig } from '@/lib/copilot/types'

export type SSEEventType =
  | 'chat_id'
  | 'title_updated'
  | 'content'
  | 'reasoning'
  | 'tool_call'
  | 'tool_generating'
  | 'tool_result'
  | 'tool_error'
  | 'subagent_start'
  | 'subagent_end'
  | 'structured_result'
  | 'subagent_result'
  | 'done'
  | 'error'
  | 'start'

export interface SSEEvent {
  type: SSEEventType
  data?: Record<string, unknown>
  subagent?: string
  toolCallId?: string
  toolName?: string
  success?: boolean
  result?: unknown
  /** Set on chat_id events */
  chatId?: string
  /** Set on title_updated events */
  title?: string
  /** Set on error events */
  error?: string
  /** Set on content/reasoning events */
  content?: string
  /** Set on reasoning events */
  phase?: string
  /** Set on tool_result events */
  failedDependency?: boolean
}

export type ToolCallStatus = 'pending' | 'executing' | 'success' | 'error' | 'skipped' | 'rejected'

export interface ToolCallState {
  id: string
  name: string
  status: ToolCallStatus
  params?: Record<string, unknown>
  result?: ToolCallResult
  error?: string
  startTime?: number
  endTime?: number
}

export interface ToolCallResult<T = unknown> {
  success: boolean
  output?: T
  error?: string
}

export type ContentBlockType = 'text' | 'thinking' | 'tool_call' | 'subagent_text'

export interface ContentBlock {
  type: ContentBlockType
  content?: string
  toolCall?: ToolCallState
  timestamp: number
}

export interface StreamingContext {
  chatId?: string
  conversationId?: string
  messageId: string
  accumulatedContent: string
  contentBlocks: ContentBlock[]
  toolCalls: Map<string, ToolCallState>
  currentThinkingBlock: ContentBlock | null
  isInThinkingBlock: boolean
  subAgentParentToolCallId?: string
  subAgentContent: Record<string, string>
  subAgentToolCalls: Record<string, ToolCallState[]>
  pendingContent: string
  streamComplete: boolean
  wasAborted: boolean
  errors: string[]
}

export interface FileAttachment {
  id: string
  key: string
  name: string
  mimeType: string
  size: number
}

export interface OrchestratorRequest {
  message: string
  workflowId: string
  userId: string
  chatId?: string
  mode?: 'agent' | 'ask' | 'plan'
  model?: string
  conversationId?: string
  contexts?: Array<{ type: string; content: string }>
  fileAttachments?: FileAttachment[]
  commands?: string[]
  provider?: CopilotProviderConfig
  streamToolCalls?: boolean
  version?: string
  prefetch?: boolean
  userName?: string
}

export interface OrchestratorOptions {
  autoExecuteTools?: boolean
  timeout?: number
  onEvent?: (event: SSEEvent) => void | Promise<void>
  onComplete?: (result: OrchestratorResult) => void | Promise<void>
  onError?: (error: Error) => void | Promise<void>
  abortSignal?: AbortSignal
  interactive?: boolean
}

export interface OrchestratorResult {
  success: boolean
  content: string
  contentBlocks: ContentBlock[]
  toolCalls: ToolCallSummary[]
  chatId?: string
  conversationId?: string
  error?: string
  errors?: string[]
}

export interface ToolCallSummary {
  id: string
  name: string
  status: ToolCallStatus
  params?: Record<string, unknown>
  result?: unknown
  error?: string
  durationMs?: number
}

export interface ExecutionContext {
  userId: string
  workflowId: string
  workspaceId?: string
  decryptedEnvVars?: Record<string, string>
}
@@ -44,29 +44,29 @@ export async function processContexts(
       ctx.kind
     )
   }
-  if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
+  if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
     return await processKnowledgeFromDb(
-      (ctx as any).knowledgeId,
+      ctx.knowledgeId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'blocks' && (ctx as any).blockId) {
-    return await processBlockMetadata((ctx as any).blockId, ctx.label ? `@${ctx.label}` : '@')
+  if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
+    return await processBlockMetadata(ctx.blockIds[0], ctx.label ? `@${ctx.label}` : '@')
   }
-  if (ctx.kind === 'templates' && (ctx as any).templateId) {
+  if (ctx.kind === 'templates' && ctx.templateId) {
     return await processTemplateFromDb(
-      (ctx as any).templateId,
+      ctx.templateId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'logs' && (ctx as any).executionId) {
+  if (ctx.kind === 'logs' && ctx.executionId) {
     return await processExecutionLogFromDb(
-      (ctx as any).executionId,
+      ctx.executionId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
-    return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
+  if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
+    return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
   }
   // Other kinds can be added here: workflow, blocks, logs, knowledge, templates, docs
   return null
@@ -99,33 +99,33 @@ export async function processContextsServer(
       ctx.kind
     )
   }
-  if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
+  if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
     return await processKnowledgeFromDb(
-      (ctx as any).knowledgeId,
+      ctx.knowledgeId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'blocks' && (ctx as any).blockId) {
+  if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
     return await processBlockMetadata(
-      (ctx as any).blockId,
+      ctx.blockIds[0],
       ctx.label ? `@${ctx.label}` : '@',
       userId
     )
   }
-  if (ctx.kind === 'templates' && (ctx as any).templateId) {
+  if (ctx.kind === 'templates' && ctx.templateId) {
     return await processTemplateFromDb(
-      (ctx as any).templateId,
+      ctx.templateId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'logs' && (ctx as any).executionId) {
+  if (ctx.kind === 'logs' && ctx.executionId) {
     return await processExecutionLogFromDb(
-      (ctx as any).executionId,
+      ctx.executionId,
       ctx.label ? `@${ctx.label}` : '@'
     )
   }
-  if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
-    return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
+  if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
+    return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
   }
   if (ctx.kind === 'docs') {
     try {
|||||||
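Both hunks above drop the `(ctx as any)` escapes in favor of direct field access, which only type-checks because the context union now carries these fields on the relevant variants. A minimal sketch of the shapes the new checks expect (the variable name and all values are illustrative, not taken from the diff):

const contexts = [
  { kind: 'knowledge', knowledgeId: 'kb_123', label: 'Support docs' },
  // 'blocks' contexts now carry an array of ids; only the first entry is looked up
  { kind: 'blocks', blockIds: ['gmail'], label: 'Gmail' },
  { kind: 'logs', executionId: 'exec_456', label: 'Last run' },
  { kind: 'workflow_block', workflowId: 'wf_789', blockId: 'block_1', label: 'Router' },
]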
apps/sim/lib/copilot/store-utils.ts (new file, 195 lines)
@@ -0,0 +1,195 @@
import { createLogger } from '@sim/logger'
import { Loader2 } from 'lucide-react'
import {
  ClientToolCallState,
  type ClientToolDisplay,
  TOOL_DISPLAY_REGISTRY,
} from '@/lib/copilot/tools/client/tool-display-registry'
import type { CopilotStore } from '@/stores/panel/copilot/types'

const logger = createLogger('CopilotStoreUtils')

type StoreSet = (
  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
) => void

export function resolveToolDisplay(
  toolName: string | undefined,
  state: ClientToolCallState,
  _toolCallId?: string,
  params?: Record<string, any>
): ClientToolDisplay | undefined {
  if (!toolName) return undefined
  const entry = TOOL_DISPLAY_REGISTRY[toolName]
  if (!entry) return humanizedFallback(toolName, state)

  if (entry.uiConfig?.dynamicText && params) {
    const dynamicText = entry.uiConfig.dynamicText(params, state)
    const stateDisplay = entry.displayNames[state]
    if (dynamicText && stateDisplay?.icon) {
      return { text: dynamicText, icon: stateDisplay.icon }
    }
  }

  const display = entry.displayNames[state]
  if (display?.text || display?.icon) return display

  const fallbackOrder = [
    ClientToolCallState.generating,
    ClientToolCallState.executing,
    ClientToolCallState.success,
  ]
  for (const fallbackState of fallbackOrder) {
    const fallback = entry.displayNames[fallbackState]
    if (fallback?.text || fallback?.icon) return fallback
  }

  return humanizedFallback(toolName, state)
}

export function humanizedFallback(
  toolName: string,
  state: ClientToolCallState
): ClientToolDisplay | undefined {
  const formattedName = toolName.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase())
  const stateVerb =
    state === ClientToolCallState.success
      ? 'Executed'
      : state === ClientToolCallState.error
        ? 'Failed'
        : state === ClientToolCallState.rejected || state === ClientToolCallState.aborted
          ? 'Skipped'
          : 'Executing'
  return { text: `${stateVerb} ${formattedName}`, icon: Loader2 }
}

export function isRejectedState(state: string): boolean {
  return state === 'rejected'
}

export function isReviewState(state: string): boolean {
  return state === 'review'
}

export function isBackgroundState(state: string): boolean {
  return state === 'background'
}

export function isTerminalState(state: string): boolean {
  return (
    state === ClientToolCallState.success ||
    state === ClientToolCallState.error ||
    state === ClientToolCallState.rejected ||
    state === ClientToolCallState.aborted ||
    isReviewState(state) ||
    isBackgroundState(state)
  )
}

export function abortAllInProgressTools(
  set: StoreSet,
  get: () => CopilotStore
) {
  try {
    const { toolCallsById, messages } = get()
    const updatedMap = { ...toolCallsById }
    const abortedIds = new Set<string>()
    let hasUpdates = false
    for (const [id, tc] of Object.entries(toolCallsById)) {
      const st = tc.state
      const isTerminal =
        st === ClientToolCallState.success ||
        st === ClientToolCallState.error ||
        st === ClientToolCallState.rejected ||
        st === ClientToolCallState.aborted
      if (!isTerminal || isReviewState(st)) {
        abortedIds.add(id)
        updatedMap[id] = {
          ...tc,
          state: ClientToolCallState.aborted,
          subAgentStreaming: false,
          display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, tc.params),
        }
        hasUpdates = true
      } else if (tc.subAgentStreaming) {
        updatedMap[id] = {
          ...tc,
          subAgentStreaming: false,
        }
        hasUpdates = true
      }
    }
    if (abortedIds.size > 0 || hasUpdates) {
      set({ toolCallsById: updatedMap })
      set((s: CopilotStore) => {
        const msgs = [...s.messages]
        for (let mi = msgs.length - 1; mi >= 0; mi--) {
          const m = msgs[mi]
          if (m.role !== 'assistant' || !Array.isArray(m.contentBlocks)) continue
          let changed = false
          const blocks = m.contentBlocks.map((b: any) => {
            if (b?.type === 'tool_call' && b.toolCall?.id && abortedIds.has(b.toolCall.id)) {
              changed = true
              const prev = b.toolCall
              return {
                ...b,
                toolCall: {
                  ...prev,
                  state: ClientToolCallState.aborted,
                  display: resolveToolDisplay(
                    prev?.name,
                    ClientToolCallState.aborted,
                    prev?.id,
                    prev?.params
                  ),
                },
              }
            }
            return b
          })
          if (changed) {
            msgs[mi] = { ...m, contentBlocks: blocks }
            break
          }
        }
        return { messages: msgs }
      })
    }
  } catch (error) {
    logger.warn('Failed to abort in-progress tools', {
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export function cleanupActiveState(
  set: (partial: Record<string, unknown>) => void,
  get: () => Record<string, unknown>
): void {
  abortAllInProgressTools(
    set as unknown as StoreSet,
    get as unknown as () => CopilotStore
  )
  try {
    const { useWorkflowDiffStore } = require('@/stores/workflow-diff/store') as {
      useWorkflowDiffStore: {
        getState: () => { clearDiff: (options?: { restoreBaseline?: boolean }) => void }
      }
    }
    useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
  } catch (error) {
    logger.warn('Failed to clear diff during cleanup', {
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export function stripTodoTags(text: string): string {
  if (!text) return text
  return text
    .replace(/<marktodo>[\s\S]*?<\/marktodo>/g, '')
    .replace(/<checkofftodo>[\s\S]*?<\/checkofftodo>/g, '')
    .replace(/<design_workflow>[\s\S]*?<\/design_workflow>/g, '')
    .replace(/[ \t]+\n/g, '\n')
    .replace(/\n{2,}/g, '\n')
}
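The pure helpers in this new file can be exercised in isolation. A short sketch, assuming the usual `@/lib` path alias for the new module (the tool name and tag string are made up for illustration):

import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
import { isTerminalState, resolveToolDisplay, stripTodoTags } from '@/lib/copilot/store-utils'

isTerminalState('executing')  // false — the call is still in flight
isTerminalState('rejected')   // true
stripTodoTags('Done.\n<marktodo>step 1</marktodo>')  // internal todo/design tags are stripped out

// resolveToolDisplay falls back to a humanized "Executed Run Workflow"-style label
// when a tool has no entry in TOOL_DISPLAY_REGISTRY:
const display = resolveToolDisplay('run_workflow', ClientToolCallState.success)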
@@ -1,120 +0,0 @@
/**
 * Base class for subagent tools.
 *
 * Subagent tools spawn a server-side subagent that does the actual work.
 * The tool auto-executes and the subagent's output is streamed back
 * as nested content under the tool call.
 *
 * Examples: edit, plan, debug, evaluate, research, etc.
 */
import type { LucideIcon } from 'lucide-react'
import { BaseClientTool, type BaseClientToolMetadata, ClientToolCallState } from './base-tool'
import type { SubagentConfig, ToolUIConfig } from './ui-config'
import { registerToolUIConfig } from './ui-config'

/**
 * Configuration for creating a subagent tool
 */
export interface SubagentToolConfig {
  /** Unique tool ID */
  id: string
  /** Display names per state */
  displayNames: {
    streaming: { text: string; icon: LucideIcon }
    success: { text: string; icon: LucideIcon }
    error: { text: string; icon: LucideIcon }
  }
  /** Subagent UI configuration */
  subagent: SubagentConfig
  /**
   * Optional: Whether this is a "special" tool (gets gradient styling).
   * Default: false
   */
  isSpecial?: boolean
}

/**
 * Create metadata for a subagent tool from config
 */
function createSubagentMetadata(config: SubagentToolConfig): BaseClientToolMetadata {
  const { displayNames, subagent, isSpecial } = config
  const { streaming, success, error } = displayNames

  const uiConfig: ToolUIConfig = {
    isSpecial: isSpecial ?? false,
    subagent,
  }

  return {
    displayNames: {
      [ClientToolCallState.generating]: streaming,
      [ClientToolCallState.pending]: streaming,
      [ClientToolCallState.executing]: streaming,
      [ClientToolCallState.success]: success,
      [ClientToolCallState.error]: error,
      [ClientToolCallState.rejected]: {
        text: `${config.id.charAt(0).toUpperCase() + config.id.slice(1)} skipped`,
        icon: error.icon,
      },
      [ClientToolCallState.aborted]: {
        text: `${config.id.charAt(0).toUpperCase() + config.id.slice(1)} aborted`,
        icon: error.icon,
      },
    },
    uiConfig,
  }
}

/**
 * Base class for subagent tools.
 * Extends BaseClientTool with subagent-specific behavior.
 */
export abstract class BaseSubagentTool extends BaseClientTool {
  /**
   * Subagent configuration.
   * Override in subclasses to customize behavior.
   */
  static readonly subagentConfig: SubagentToolConfig

  constructor(toolCallId: string, config: SubagentToolConfig) {
    super(toolCallId, config.id, createSubagentMetadata(config))
    // Register UI config for this tool
    registerToolUIConfig(config.id, this.metadata.uiConfig!)
  }

  /**
   * Execute the subagent tool.
   * Immediately transitions to executing state - the actual work
   * is done server-side by the subagent.
   */
  async execute(_args?: Record<string, any>): Promise<void> {
    this.setState(ClientToolCallState.executing)
    // The tool result will come from the server via tool_result event
    // when the subagent completes its work
  }
}

/**
 * Factory function to create a subagent tool class.
 * Use this for simple subagent tools that don't need custom behavior.
 */
export function createSubagentToolClass(config: SubagentToolConfig) {
  // Register UI config at class creation time
  const uiConfig: ToolUIConfig = {
    isSpecial: config.isSpecial ?? false,
    subagent: config.subagent,
  }
  registerToolUIConfig(config.id, uiConfig)

  return class extends BaseClientTool {
    static readonly id = config.id

    constructor(toolCallId: string) {
      super(toolCallId, config.id, createSubagentMetadata(config))
    }

    async execute(_args?: Record<string, any>): Promise<void> {
      this.setState(ClientToolCallState.executing)
    }
  }
}
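For reference, a hedged sketch of how the removed factory was typically consumed. The 'plan' id, the icons, and the planSubagentConfig value are assumptions for illustration, not taken from the repository:

import { Loader2, PencilLine, XCircle } from 'lucide-react'
import type { SubagentConfig } from './ui-config'

declare const planSubagentConfig: SubagentConfig // assumed to be defined elsewhere

const PlanClientTool = createSubagentToolClass({
  id: 'plan',
  displayNames: {
    streaming: { text: 'Planning', icon: Loader2 },
    success: { text: 'Planned', icon: PencilLine },
    error: { text: 'Planning failed', icon: XCircle },
  },
  subagent: planSubagentConfig,
})

// Instances are keyed by the tool call id from the stream; execute() only flips the
// state to 'executing' and then waits for the server-side subagent's tool_result.
const tool = new PlanClientTool('toolcall_123')
await tool.execute()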
@@ -1,15 +1,5 @@
-// Lazy require in setState to avoid circular init issues
-import { createLogger } from '@sim/logger'
 import type { LucideIcon } from 'lucide-react'
-import type { ToolUIConfig } from './ui-config'
-
-const baseToolLogger = createLogger('BaseClientTool')
-
-const DEFAULT_TOOL_TIMEOUT_MS = 5 * 60 * 1000
-
-export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
-
-// Client tool call states used by the new runtime
 export enum ClientToolCallState {
   generating = 'generating',
   pending = 'pending',
@@ -22,252 +12,32 @@ export enum ClientToolCallState {
   background = 'background',
 }
 
-// Display configuration for a given state
 export interface ClientToolDisplay {
   text: string
   icon: LucideIcon
 }
 
-/**
- * Function to generate dynamic display text based on tool parameters and state
- * @param params - The tool call parameters
- * @param state - The current tool call state
- * @returns The dynamic text to display, or undefined to use the default text
- */
+export interface BaseClientToolMetadata {
+  displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
+  uiConfig?: Record<string, unknown>
+  getDynamicText?: (
+    params: Record<string, unknown>,
+    state: ClientToolCallState
+  ) => string | undefined
+}
+
 export type DynamicTextFormatter = (
-  params: Record<string, any>,
+  params: Record<string, unknown>,
   state: ClientToolCallState
 ) => string | undefined
 
-export interface BaseClientToolMetadata {
-  displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
-  interrupt?: {
-    accept: ClientToolDisplay
-    reject: ClientToolDisplay
-  }
-  /**
-   * Optional function to generate dynamic display text based on parameters
-   * If provided, this will override the default text in displayNames
-   */
-  getDynamicText?: DynamicTextFormatter
-  /**
-   * UI configuration for how this tool renders in the tool-call component.
-   * This replaces hardcoded logic in tool-call.tsx with declarative config.
-   */
-  uiConfig?: ToolUIConfig
-}
-
-export class BaseClientTool {
-  readonly toolCallId: string
-  readonly name: string
-  protected state: ClientToolCallState
-  protected metadata: BaseClientToolMetadata
-  protected isMarkedComplete = false
-  protected timeoutMs: number = DEFAULT_TOOL_TIMEOUT_MS
-
-  constructor(toolCallId: string, name: string, metadata: BaseClientToolMetadata) {
-    this.toolCallId = toolCallId
-    this.name = name
-    this.metadata = metadata
-    this.state = ClientToolCallState.generating
-  }
-
-  /**
-   * Set a custom timeout for this tool (in milliseconds)
-   */
-  setTimeoutMs(ms: number): void {
-    this.timeoutMs = ms
-  }
-
-  /**
-   * Check if this tool has been marked complete
-   */
-  hasBeenMarkedComplete(): boolean {
-    return this.isMarkedComplete
-  }
-
-  /**
-   * Ensure the tool is marked complete. If not already marked, marks it with error.
-   * This should be called in finally blocks to prevent leaked tool calls.
-   */
-  async ensureMarkedComplete(
-    fallbackMessage = 'Tool execution did not complete properly'
-  ): Promise<void> {
-    if (!this.isMarkedComplete) {
-      baseToolLogger.warn('Tool was not marked complete, marking with error', {
-        toolCallId: this.toolCallId,
-        toolName: this.name,
-        state: this.state,
-      })
-      await this.markToolComplete(500, fallbackMessage)
-      this.setState(ClientToolCallState.error)
-    }
-  }
-
-  /**
-   * Execute with timeout protection. Wraps the execution in a timeout and ensures
-   * markToolComplete is always called.
-   */
-  async executeWithTimeout(executeFn: () => Promise<void>, timeoutMs?: number): Promise<void> {
-    const timeout = timeoutMs ?? this.timeoutMs
-    let timeoutId: NodeJS.Timeout | null = null
-
-    try {
-      await Promise.race([
-        executeFn(),
-        new Promise<never>((_, reject) => {
-          timeoutId = setTimeout(() => {
-            reject(new Error(`Tool execution timed out after ${timeout / 1000} seconds`))
-          }, timeout)
-        }),
-      ])
-    } catch (error) {
-      const message = error instanceof Error ? error.message : String(error)
-      baseToolLogger.error('Tool execution failed or timed out', {
-        toolCallId: this.toolCallId,
-        toolName: this.name,
-        error: message,
-      })
-      // Only mark complete if not already marked
-      if (!this.isMarkedComplete) {
-        await this.markToolComplete(500, message)
-        this.setState(ClientToolCallState.error)
-      }
-    } finally {
-      if (timeoutId) clearTimeout(timeoutId)
-      // Ensure tool is always marked complete
-      await this.ensureMarkedComplete()
-    }
-  }
-
-  // Intentionally left empty - specific tools can override
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async execute(_args?: Record<string, any>): Promise<void> {
-    return
-  }
-
-  /**
-   * Mark a tool as complete on the server (proxies to server-side route).
-   * Once called, the tool is considered complete and won't be marked again.
-   */
-  async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
-    // Prevent double-marking
-    if (this.isMarkedComplete) {
-      baseToolLogger.warn('markToolComplete called but tool already marked complete', {
-        toolCallId: this.toolCallId,
-        toolName: this.name,
-        existingState: this.state,
-        attemptedStatus: status,
-      })
-      return true
-    }
-
-    this.isMarkedComplete = true
-
-    try {
-      baseToolLogger.info('markToolComplete called', {
-        toolCallId: this.toolCallId,
-        toolName: this.name,
-        state: this.state,
-        status,
-        hasMessage: message !== undefined,
-        hasData: data !== undefined,
-      })
-    } catch {}
-
-    try {
-      const res = await fetch('/api/copilot/tools/mark-complete', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({
-          id: this.toolCallId,
-          name: this.name,
-          status,
-          message,
-          data,
-        }),
-      })
-
-      if (!res.ok) {
-        // Try to surface server error
-        let errorText = `Failed to mark tool complete (status ${res.status})`
-        try {
-          const { error } = await res.json()
-          if (error) errorText = String(error)
-        } catch {}
-        throw new Error(errorText)
-      }
-
-      const json = (await res.json()) as { success?: boolean }
-      return json?.success === true
-    } catch (e) {
-      // Default failure path - but tool is still marked complete locally
-      baseToolLogger.error('Failed to mark tool complete on server', {
-        toolCallId: this.toolCallId,
-        error: e instanceof Error ? e.message : String(e),
-      })
-      return false
-    }
-  }
-
-  // Accept (continue) for interrupt flows: move pending -> executing
-  async handleAccept(): Promise<void> {
-    this.setState(ClientToolCallState.executing)
-  }
-
-  // Reject (skip) for interrupt flows: mark complete with a standard skip message
-  async handleReject(): Promise<void> {
-    await this.markToolComplete(200, 'Tool execution was skipped by the user')
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  // Return the display configuration for the current state
-  getDisplayState(): ClientToolDisplay | undefined {
-    return this.metadata.displayNames[this.state]
-  }
-
-  // Return interrupt display config (labels/icons) if defined
-  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
-    return this.metadata.interrupt
-  }
-
-  // Transition to a new state (also sync to Copilot store)
-  setState(next: ClientToolCallState, options?: { result?: any }): void {
-    const prev = this.state
-    this.state = next
-
-    // Notify store via manager to avoid import cycles
-    try {
-      const { syncToolState } = require('@/lib/copilot/tools/client/manager')
-      syncToolState(this.toolCallId, next, options)
-    } catch {}
-
-    // Log transition after syncing
-    try {
-      baseToolLogger.info('setState transition', {
-        toolCallId: this.toolCallId,
-        toolName: this.name,
-        prev,
-        next,
-        hasResult: options?.result !== undefined,
-      })
-    } catch {}
-  }
-
-  // Expose current state
-  getState(): ClientToolCallState {
-    return this.state
-  }
-
-  hasInterrupt(): boolean {
-    return !!this.metadata.interrupt
-  }
-
-  /**
-   * Get UI configuration for this tool.
-   * Used by tool-call component to determine rendering behavior.
-   */
-  getUIConfig(): ToolUIConfig | undefined {
-    return this.metadata.uiConfig
-  }
+export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
+
+/** Event detail for OAuth connect events dispatched by the copilot. */
+export interface OAuthConnectEventDetail {
+  providerName: string
+  serviceId: string
+  providerId: string
+  requiredScopes: string[]
+  newScopes?: string[]
 }
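To make the removed surface concrete, here is a hedged sketch of a subclass using the lifecycle helpers deleted above. The 'echo' tool id and its behaviour are invented for illustration; the imports refer to the class as it existed before this change:

import { BaseClientTool, ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'

class EchoClientTool extends BaseClientTool {
  static readonly id = 'echo' // hypothetical id, illustration only

  constructor(toolCallId: string) {
    super(toolCallId, EchoClientTool.id, { displayNames: {} })
  }

  async execute(args?: Record<string, any>): Promise<void> {
    // executeWithTimeout guarantees markToolComplete runs even if the body hangs,
    // marking the call as failed once the (optional) per-call timeout elapses.
    await this.executeWithTimeout(async () => {
      this.setState(ClientToolCallState.executing)
      await this.markToolComplete(200, 'echoed', args)
      this.setState(ClientToolCallState.success)
    }, 30_000)
  }
}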
@@ -1,100 +0,0 @@
import { createLogger } from '@sim/logger'
import { FileCode, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  GetBlockConfigInput,
  GetBlockConfigResult,
} from '@/lib/copilot/tools/shared/schemas'
import { getLatestBlock } from '@/blocks/registry'

interface GetBlockConfigArgs {
  blockType: string
  operation?: string
  trigger?: boolean
}

export class GetBlockConfigClientTool extends BaseClientTool {
  static readonly id = 'get_block_config'

  constructor(toolCallId: string) {
    super(toolCallId, GetBlockConfigClientTool.id, GetBlockConfigClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
      [ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
      [ClientToolCallState.rejected]: {
        text: 'Skipped getting block config',
        icon: MinusCircle,
      },
    },
    getDynamicText: (params, state) => {
      if (params?.blockType && typeof params.blockType === 'string') {
        const blockConfig = getLatestBlock(params.blockType)
        const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
        const opSuffix = params.operation ? ` (${params.operation})` : ''

        switch (state) {
          case ClientToolCallState.success:
            return `Retrieved ${blockName}${opSuffix} config`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Retrieving ${blockName}${opSuffix} config`
          case ClientToolCallState.error:
            return `Failed to retrieve ${blockName}${opSuffix} config`
          case ClientToolCallState.aborted:
            return `Aborted retrieving ${blockName}${opSuffix} config`
          case ClientToolCallState.rejected:
            return `Skipped retrieving ${blockName}${opSuffix} config`
        }
      }
      return undefined
    },
  }

  async execute(args?: GetBlockConfigArgs): Promise<void> {
    const logger = createLogger('GetBlockConfigClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const { blockType, operation, trigger } = GetBlockConfigInput.parse(args || {})

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          toolName: 'get_block_config',
          payload: { blockType, operation, trigger },
        }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        throw new Error(errorText || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = GetBlockConfigResult.parse(parsed.result)

      const inputCount = Object.keys(result.inputs).length
      const outputCount = Object.keys(result.outputs).length
      await this.markToolComplete(200, { inputs: inputCount, outputs: outputCount }, result)
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      logger.error('Execute failed', { message })
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -1,110 +0,0 @@
import { createLogger } from '@sim/logger'
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  GetBlockOptionsInput,
  GetBlockOptionsResult,
} from '@/lib/copilot/tools/shared/schemas'
import { getLatestBlock } from '@/blocks/registry'

interface GetBlockOptionsArgs {
  blockId: string
}

export class GetBlockOptionsClientTool extends BaseClientTool {
  static readonly id = 'get_block_options'

  constructor(toolCallId: string) {
    super(toolCallId, GetBlockOptionsClientTool.id, GetBlockOptionsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
      [ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
      [ClientToolCallState.rejected]: {
        text: 'Skipped getting block operations',
        icon: MinusCircle,
      },
    },
    getDynamicText: (params, state) => {
      const blockId =
        (params as any)?.blockId ||
        (params as any)?.blockType ||
        (params as any)?.block_id ||
        (params as any)?.block_type
      if (typeof blockId === 'string') {
        const blockConfig = getLatestBlock(blockId)
        const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()

        switch (state) {
          case ClientToolCallState.success:
            return `Retrieved ${blockName} operations`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Retrieving ${blockName} operations`
          case ClientToolCallState.error:
            return `Failed to retrieve ${blockName} operations`
          case ClientToolCallState.aborted:
            return `Aborted retrieving ${blockName} operations`
          case ClientToolCallState.rejected:
            return `Skipped retrieving ${blockName} operations`
        }
      }
      return undefined
    },
  }

  async execute(args?: GetBlockOptionsArgs): Promise<void> {
    const logger = createLogger('GetBlockOptionsClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      // Handle both camelCase and snake_case parameter names, plus blockType as an alias
      const normalizedArgs = args
        ? {
            blockId:
              args.blockId ||
              (args as any).block_id ||
              (args as any).blockType ||
              (args as any).block_type,
          }
        : {}

      logger.info('execute called', { originalArgs: args, normalizedArgs })

      const { blockId } = GetBlockOptionsInput.parse(normalizedArgs)

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'get_block_options', payload: { blockId } }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        throw new Error(errorText || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = GetBlockOptionsResult.parse(parsed.result)

      await this.markToolComplete(200, { operations: result.operations.length }, result)
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      logger.error('Execute failed', { message })
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -1,59 +0,0 @@
import { createLogger } from '@sim/logger'
import { Blocks, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  GetBlocksAndToolsResult,
} from '@/lib/copilot/tools/shared/schemas'

export class GetBlocksAndToolsClientTool extends BaseClientTool {
  static readonly id = 'get_blocks_and_tools'

  constructor(toolCallId: string) {
    super(toolCallId, GetBlocksAndToolsClientTool.id, GetBlocksAndToolsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
      [ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
    },
    interrupt: undefined,
  }

  async execute(): Promise<void> {
    const logger = createLogger('GetBlocksAndToolsClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'get_blocks_and_tools', payload: {} }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        throw new Error(errorText || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = GetBlocksAndToolsResult.parse(parsed.result)

      await this.markToolComplete(200, 'Successfully retrieved blocks and tools', result)
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -1,95 +0,0 @@
import { createLogger } from '@sim/logger'
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  GetBlocksMetadataInput,
  GetBlocksMetadataResult,
} from '@/lib/copilot/tools/shared/schemas'

interface GetBlocksMetadataArgs {
  blockIds: string[]
}

export class GetBlocksMetadataClientTool extends BaseClientTool {
  static readonly id = 'get_blocks_metadata'

  constructor(toolCallId: string) {
    super(toolCallId, GetBlocksMetadataClientTool.id, GetBlocksMetadataClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Searching block choices', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Searching block choices', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Searched block choices', icon: ListFilter },
      [ClientToolCallState.error]: { text: 'Failed to search block choices', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted searching block choices', icon: XCircle },
      [ClientToolCallState.rejected]: {
        text: 'Skipped searching block choices',
        icon: MinusCircle,
      },
    },
    getDynamicText: (params, state) => {
      if (params?.blockIds && Array.isArray(params.blockIds) && params.blockIds.length > 0) {
        const blockList = params.blockIds
          .slice(0, 3)
          .map((blockId) => blockId.replace(/_/g, ' '))
          .join(', ')
        const more = params.blockIds.length > 3 ? '...' : ''
        const blocks = `${blockList}${more}`

        switch (state) {
          case ClientToolCallState.success:
            return `Searched ${blocks}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Searching ${blocks}`
          case ClientToolCallState.error:
            return `Failed to search ${blocks}`
          case ClientToolCallState.aborted:
            return `Aborted searching ${blocks}`
          case ClientToolCallState.rejected:
            return `Skipped searching ${blocks}`
        }
      }
      return undefined
    },
  }

  async execute(args?: GetBlocksMetadataArgs): Promise<void> {
    const logger = createLogger('GetBlocksMetadataClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const { blockIds } = GetBlocksMetadataInput.parse(args || {})

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'get_blocks_metadata', payload: { blockIds } }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        throw new Error(errorText || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = GetBlocksMetadataResult.parse(parsed.result)

      await this.markToolComplete(200, { retrieved: Object.keys(result.metadata).length }, result)
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      logger.error('Execute failed', { message })
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -1,64 +0,0 @@
import { createLogger } from '@sim/logger'
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  GetTriggerBlocksResult,
} from '@/lib/copilot/tools/shared/schemas'

export class GetTriggerBlocksClientTool extends BaseClientTool {
  static readonly id = 'get_trigger_blocks'

  constructor(toolCallId: string) {
    super(toolCallId, GetTriggerBlocksClientTool.id, GetTriggerBlocksClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Finding trigger blocks', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Finding trigger blocks', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Finding trigger blocks', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Found trigger blocks', icon: ListFilter },
      [ClientToolCallState.error]: { text: 'Failed to find trigger blocks', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted finding trigger blocks', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped finding trigger blocks', icon: MinusCircle },
    },
    interrupt: undefined,
  }

  async execute(): Promise<void> {
    const logger = createLogger('GetTriggerBlocksClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'get_trigger_blocks', payload: {} }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        try {
          const errorJson = JSON.parse(errorText)
          throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
        } catch {
          throw new Error(errorText || `Server error (${res.status})`)
        }
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = GetTriggerBlocksResult.parse(parsed.result)

      await this.markToolComplete(200, 'Successfully retrieved trigger blocks', result)
      this.setState(ClientToolCallState.success)
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -1,52 +0,0 @@
import { Loader2, MinusCircle, Search, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class GetExamplesRagClientTool extends BaseClientTool {
  static readonly id = 'get_examples_rag'

  constructor(toolCallId: string) {
    super(toolCallId, GetExamplesRagClientTool.id, GetExamplesRagClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Fetching examples', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Fetching examples', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Fetched examples', icon: Search },
      [ClientToolCallState.error]: { text: 'Failed to fetch examples', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted getting examples', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped getting examples', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.query && typeof params.query === 'string') {
        const query = params.query

        switch (state) {
          case ClientToolCallState.success:
            return `Found examples for ${query}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Searching examples for ${query}`
          case ClientToolCallState.error:
            return `Failed to find examples for ${query}`
          case ClientToolCallState.aborted:
            return `Aborted searching examples for ${query}`
          case ClientToolCallState.rejected:
            return `Skipped searching examples for ${query}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,58 +0,0 @@
import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class GetOperationsExamplesClientTool extends BaseClientTool {
  static readonly id = 'get_operations_examples'

  constructor(toolCallId: string) {
    super(toolCallId, GetOperationsExamplesClientTool.id, GetOperationsExamplesClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Designing workflow component', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Designing workflow component', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Designing workflow component', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Designed workflow component', icon: Zap },
      [ClientToolCallState.error]: { text: 'Failed to design workflow component', icon: XCircle },
      [ClientToolCallState.aborted]: {
        text: 'Aborted designing workflow component',
        icon: MinusCircle,
      },
      [ClientToolCallState.rejected]: {
        text: 'Skipped designing workflow component',
        icon: MinusCircle,
      },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.query && typeof params.query === 'string') {
        const query = params.query

        switch (state) {
          case ClientToolCallState.success:
            return `Designed ${query}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Designing ${query}`
          case ClientToolCallState.error:
            return `Failed to design ${query}`
          case ClientToolCallState.aborted:
            return `Aborted designing ${query}`
          case ClientToolCallState.rejected:
            return `Skipped designing ${query}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,31 +0,0 @@
import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class GetTriggerExamplesClientTool extends BaseClientTool {
  static readonly id = 'get_trigger_examples'

  constructor(toolCallId: string) {
    super(toolCallId, GetTriggerExamplesClientTool.id, GetTriggerExamplesClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Selecting a trigger', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Selecting a trigger', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Selecting a trigger', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Selected a trigger', icon: Zap },
      [ClientToolCallState.error]: { text: 'Failed to select a trigger', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted selecting a trigger', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped selecting a trigger', icon: MinusCircle },
    },
    interrupt: undefined,
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,37 +0,0 @@
import { Loader2, MinusCircle, PencilLine, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SummarizeClientTool extends BaseClientTool {
  static readonly id = 'summarize_conversation'

  constructor(toolCallId: string) {
    super(toolCallId, SummarizeClientTool.id, SummarizeClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Summarizing conversation', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Summarizing conversation', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Summarizing conversation', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Summarized conversation', icon: PencilLine },
      [ClientToolCallState.error]: { text: 'Failed to summarize conversation', icon: XCircle },
      [ClientToolCallState.aborted]: {
        text: 'Aborted summarizing conversation',
        icon: MinusCircle,
      },
      [ClientToolCallState.rejected]: {
        text: 'Skipped summarizing conversation',
        icon: MinusCircle,
      },
    },
    interrupt: undefined,
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,36 +0,0 @@
/**
 * Initialize all tool UI configurations.
 *
 * This module imports all client tools to trigger their UI config registration.
 * Import this module early in the app to ensure all tool configs are available.
 */

// Other tools (subagents)
import './other/auth'
import './other/custom-tool'
import './other/debug'
import './other/deploy'
import './other/edit'
import './other/evaluate'
import './other/info'
import './other/knowledge'
import './other/make-api-request'
import './other/plan'
import './other/research'
import './other/sleep'
import './other/superagent'
import './other/test'
import './other/tour'
import './other/workflow'

// Workflow tools
import './workflow/deploy-api'
import './workflow/deploy-chat'
import './workflow/deploy-mcp'
import './workflow/edit-workflow'
import './workflow/redeploy'
import './workflow/run-workflow'
import './workflow/set-global-workflow-variables'

// User tools
import './user/set-environment-variables'
@@ -1,143 +0,0 @@
import { createLogger } from '@sim/logger'
import { Database, Loader2, MinusCircle, PlusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  ExecuteResponseSuccessSchema,
  type KnowledgeBaseArgs,
} from '@/lib/copilot/tools/shared/schemas'
import { useCopilotStore } from '@/stores/panel/copilot/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

/**
 * Client tool for knowledge base operations
 */
export class KnowledgeBaseClientTool extends BaseClientTool {
  static readonly id = 'knowledge_base'

  constructor(toolCallId: string) {
    super(toolCallId, KnowledgeBaseClientTool.id, KnowledgeBaseClientTool.metadata)
  }

  /**
   * Only show interrupt for create operation
   */
  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
    const toolCallsById = useCopilotStore.getState().toolCallsById
    const toolCall = toolCallsById[this.toolCallId]
    const params = toolCall?.params as KnowledgeBaseArgs | undefined

    // Only require confirmation for create operation
    if (params?.operation === 'create') {
      const name = params?.args?.name || 'new knowledge base'
      return {
        accept: { text: `Create "${name}"`, icon: PlusCircle },
        reject: { text: 'Skip', icon: XCircle },
      }
    }

    // No interrupt for list, get, query - auto-execute
    return undefined
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Accessing knowledge base', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Accessing knowledge base', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Accessing knowledge base', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Accessed knowledge base', icon: Database },
      [ClientToolCallState.error]: { text: 'Failed to access knowledge base', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted knowledge base access', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped knowledge base access', icon: MinusCircle },
    },
    getDynamicText: (params: Record<string, any>, state: ClientToolCallState) => {
      const operation = params?.operation as string | undefined
      const name = params?.args?.name as string | undefined

      const opVerbs: Record<string, { active: string; past: string; pending?: string }> = {
        create: {
          active: 'Creating knowledge base',
          past: 'Created knowledge base',
          pending: name ? `Create knowledge base "${name}"?` : 'Create knowledge base?',
        },
        list: { active: 'Listing knowledge bases', past: 'Listed knowledge bases' },
        get: { active: 'Getting knowledge base', past: 'Retrieved knowledge base' },
        query: { active: 'Querying knowledge base', past: 'Queried knowledge base' },
      }
      const defaultVerb: { active: string; past: string; pending?: string } = {
        active: 'Accessing knowledge base',
        past: 'Accessed knowledge base',
      }
      const verb = operation ? opVerbs[operation] || defaultVerb : defaultVerb

      if (state === ClientToolCallState.success) {
        return verb.past
      }
      if (state === ClientToolCallState.pending && verb.pending) {
        return verb.pending
      }
      if (
        state === ClientToolCallState.generating ||
        state === ClientToolCallState.pending ||
        state === ClientToolCallState.executing
      ) {
        return verb.active
      }
      return undefined
    },
  }

  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  async handleAccept(args?: KnowledgeBaseArgs): Promise<void> {
    await this.execute(args)
  }

  async execute(args?: KnowledgeBaseArgs): Promise<void> {
    const logger = createLogger('KnowledgeBaseClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      // Get the workspace ID from the workflow registry hydration state
      const { hydration } = useWorkflowRegistry.getState()
      const workspaceId = hydration.workspaceId

      // Build payload with workspace ID included in args
      const payload: KnowledgeBaseArgs = {
        ...(args || { operation: 'list' }),
        args: {
          ...(args?.args || {}),
          workspaceId: workspaceId || undefined,
        },
      }

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'knowledge_base', payload }),
      })

      if (!res.ok) {
        const txt = await res.text().catch(() => '')
        throw new Error(txt || `Server error (${res.status})`)
      }

      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)

      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, 'Knowledge base operation completed', parsed.result)
      this.setState(ClientToolCallState.success)
    } catch (e: any) {
      logger.error('execute failed', { message: e?.message })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, e?.message || 'Failed to access knowledge base')
    }
  }
}
@@ -1,24 +0,0 @@
const instances: Record<string, any> = {}

let syncStateFn: ((toolCallId: string, nextState: any, options?: { result?: any }) => void) | null =
  null

export function registerClientTool(toolCallId: string, instance: any) {
  instances[toolCallId] = instance
}

export function getClientTool(toolCallId: string): any | undefined {
  return instances[toolCallId]
}

export function registerToolStateSync(
  fn: (toolCallId: string, nextState: any, options?: { result?: any }) => void
) {
  syncStateFn = fn
}

export function syncToolState(toolCallId: string, nextState: any, options?: { result?: any }) {
  try {
    syncStateFn?.(toolCallId, nextState, options)
  } catch {}
}
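A minimal usage sketch of the registry above; the import paths, tool call id, and logging callback are illustrative assumptions and are not part of the diff:

// Hypothetical wiring: register a client tool instance and a state-sync callback.
// KnowledgeBaseClientTool comes from the earlier hunk; both import paths are assumed.
import { KnowledgeBaseClientTool } from '@/lib/copilot/tools/client/knowledge-base'
import {
  registerClientTool,
  registerToolStateSync,
  syncToolState,
} from '@/lib/copilot/tools/client/registry'

const toolCallId = 'call_123' // example id
registerClientTool(toolCallId, new KnowledgeBaseClientTool(toolCallId))

// Keep the copilot UI in sync whenever a tool changes state.
registerToolStateSync((id, nextState, options) => {
  console.log('tool state changed', id, nextState, options?.result)
})

// Any caller can then push a state transition through the shared hook.
syncToolState(toolCallId, 'executing')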
@@ -1,241 +0,0 @@
import { createLogger } from '@sim/logger'
import { Loader2, Navigation, X, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { useCopilotStore } from '@/stores/panel/copilot/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

type NavigationDestination = 'workflow' | 'logs' | 'templates' | 'vector_db' | 'settings'

interface NavigateUIArgs {
  destination: NavigationDestination
  workflowName?: string
}

export class NavigateUIClientTool extends BaseClientTool {
  static readonly id = 'navigate_ui'

  constructor(toolCallId: string) {
    super(toolCallId, NavigateUIClientTool.id, NavigateUIClientTool.metadata)
  }

  /**
   * Override to provide dynamic button text based on destination
   */
  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
    const toolCallsById = useCopilotStore.getState().toolCallsById
    const toolCall = toolCallsById[this.toolCallId]
    const params = toolCall?.params as NavigateUIArgs | undefined

    const destination = params?.destination
    const workflowName = params?.workflowName

    let buttonText = 'Navigate'

    if (destination === 'workflow' && workflowName) {
      buttonText = 'Open workflow'
    } else if (destination === 'logs') {
      buttonText = 'Open logs'
    } else if (destination === 'templates') {
      buttonText = 'Open templates'
    } else if (destination === 'vector_db') {
      buttonText = 'Open vector DB'
    } else if (destination === 'settings') {
      buttonText = 'Open settings'
    }

    return {
      accept: { text: buttonText, icon: Navigation },
      reject: { text: 'Skip', icon: XCircle },
    }
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: {
        text: 'Preparing to open',
        icon: Loader2,
      },
      [ClientToolCallState.pending]: { text: 'Open?', icon: Navigation },
      [ClientToolCallState.executing]: { text: 'Opening', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Opened', icon: Navigation },
      [ClientToolCallState.error]: { text: 'Failed to open', icon: X },
      [ClientToolCallState.aborted]: {
        text: 'Aborted opening',
        icon: XCircle,
      },
      [ClientToolCallState.rejected]: {
        text: 'Skipped opening',
        icon: XCircle,
      },
    },
    interrupt: {
      accept: { text: 'Open', icon: Navigation },
      reject: { text: 'Skip', icon: XCircle },
    },
    getDynamicText: (params, state) => {
      const destination = params?.destination as NavigationDestination | undefined
      const workflowName = params?.workflowName

      const action = 'open'
      const actionCapitalized = 'Open'
      const actionPast = 'opened'
      const actionIng = 'opening'
      let target = ''

      if (destination === 'workflow' && workflowName) {
        target = ` workflow "${workflowName}"`
      } else if (destination === 'workflow') {
        target = ' workflows'
      } else if (destination === 'logs') {
        target = ' logs'
      } else if (destination === 'templates') {
        target = ' templates'
      } else if (destination === 'vector_db') {
        target = ' vector database'
      } else if (destination === 'settings') {
        target = ' settings'
      }

      const fullAction = `${action}${target}`
      const fullActionCapitalized = `${actionCapitalized}${target}`
      const fullActionPast = `${actionPast}${target}`
      const fullActionIng = `${actionIng}${target}`

      switch (state) {
        case ClientToolCallState.success:
          return fullActionPast.charAt(0).toUpperCase() + fullActionPast.slice(1)
        case ClientToolCallState.executing:
          return fullActionIng.charAt(0).toUpperCase() + fullActionIng.slice(1)
        case ClientToolCallState.generating:
          return `Preparing to ${fullAction}`
        case ClientToolCallState.pending:
          return `${fullActionCapitalized}?`
        case ClientToolCallState.error:
          return `Failed to ${fullAction}`
        case ClientToolCallState.aborted:
          return `Aborted ${fullAction}`
        case ClientToolCallState.rejected:
          return `Skipped ${fullAction}`
      }
      return undefined
    },
  }

  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  async handleAccept(args?: NavigateUIArgs): Promise<void> {
    const logger = createLogger('NavigateUIClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      // Get params from copilot store if not provided directly
      let destination = args?.destination
      let workflowName = args?.workflowName

      if (!destination) {
        const toolCallsById = useCopilotStore.getState().toolCallsById
        const toolCall = toolCallsById[this.toolCallId]
        const params = toolCall?.params as NavigateUIArgs | undefined
        destination = params?.destination
        workflowName = params?.workflowName
      }

      if (!destination) {
        throw new Error('No destination provided')
      }

      let navigationUrl = ''
      let successMessage = ''

      // Get current workspace ID from URL
      const workspaceId = window.location.pathname.split('/')[2]

      switch (destination) {
        case 'workflow':
          if (workflowName) {
            // Find workflow by name
            const { workflows } = useWorkflowRegistry.getState()
            const workflow = Object.values(workflows).find(
              (w) => w.name.toLowerCase() === workflowName.toLowerCase()
            )

            if (!workflow) {
              throw new Error(`Workflow "${workflowName}" not found`)
            }

            navigationUrl = `/workspace/${workspaceId}/w/${workflow.id}`
            successMessage = `Navigated to workflow "${workflowName}"`
          } else {
            navigationUrl = `/workspace/${workspaceId}/w`
            successMessage = 'Navigated to workflows'
          }
          break

        case 'logs':
          navigationUrl = `/workspace/${workspaceId}/logs`
          successMessage = 'Navigated to logs'
          break

        case 'templates':
          navigationUrl = `/workspace/${workspaceId}/templates`
          successMessage = 'Navigated to templates'
          break

        case 'vector_db':
          navigationUrl = `/workspace/${workspaceId}/vector-db`
          successMessage = 'Navigated to vector database'
          break

        case 'settings':
          window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'general' } }))
          successMessage = 'Opened settings'
          break

        default:
          throw new Error(`Unknown destination: ${destination}`)
      }

      // Navigate if URL was set
      if (navigationUrl) {
        window.location.href = navigationUrl
      }

      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, successMessage, {
        destination,
        workflowName,
        navigated: true,
      })
    } catch (e: any) {
      logger.error('Navigation failed', { message: e?.message })
      this.setState(ClientToolCallState.error)

      // Get destination info for better error message
      const toolCallsById = useCopilotStore.getState().toolCallsById
      const toolCall = toolCallsById[this.toolCallId]
      const params = toolCall?.params as NavigateUIArgs | undefined
      const dest = params?.destination
      const wfName = params?.workflowName

      let errorMessage = e?.message || 'Failed to navigate'
      if (dest === 'workflow' && wfName) {
        errorMessage = `Failed to navigate to workflow "${wfName}": ${e?.message || 'Unknown error'}`
      } else if (dest) {
        errorMessage = `Failed to navigate to ${dest}: ${e?.message || 'Unknown error'}`
      }

      await this.markToolComplete(500, errorMessage)
    }
  }

  async execute(args?: NavigateUIArgs): Promise<void> {
    await this.handleAccept(args)
  }
}
@@ -1,56 +0,0 @@
import { KeyRound, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface AuthArgs {
  instruction: string
}

/**
 * Auth tool that spawns a subagent to handle authentication setup.
 * This tool auto-executes and the actual work is done by the auth subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class AuthClientTool extends BaseClientTool {
  static readonly id = 'auth'

  constructor(toolCallId: string) {
    super(toolCallId, AuthClientTool.id, AuthClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Authenticating', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Authenticating', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Authenticating', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Authenticated', icon: KeyRound },
      [ClientToolCallState.error]: { text: 'Failed to authenticate', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped auth', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted auth', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Authenticating',
        completedLabel: 'Authenticated',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the auth tool.
   * This just marks the tool as executing - the actual auth work is done server-side
   * by the auth subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: AuthArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(AuthClientTool.id, AuthClientTool.metadata.uiConfig!)
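The auth, custom_tool, debug, deploy, evaluate, info, knowledge, and plan tools in the following hunks all share this subagent shape. A hedged sketch of how such a tool is typically driven; the tool call id and instruction text are illustrative assumptions, and registerClientTool comes from the registry hunk above:

// Sketch only: instantiate the subagent-backed tool for an incoming tool call and
// move it to 'executing'; the real result arrives later via a server tool_result
// event, as the class comments describe.
const authCall = new AuthClientTool('call_auth_1') // example tool call id
registerClientTool('call_auth_1', authCall)
await authCall.execute({ instruction: 'connect my Gmail account' }) // state -> executing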
@@ -1,61 +0,0 @@
import { createLogger } from '@sim/logger'
import { Check, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

interface CheckoffTodoArgs {
  id?: string
  todoId?: string
}

export class CheckoffTodoClientTool extends BaseClientTool {
  static readonly id = 'checkoff_todo'

  constructor(toolCallId: string) {
    super(toolCallId, CheckoffTodoClientTool.id, CheckoffTodoClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Marking todo', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Marking todo', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Marked todo complete', icon: Check },
      [ClientToolCallState.error]: { text: 'Failed to mark todo', icon: XCircle },
    },
  }

  async execute(args?: CheckoffTodoArgs): Promise<void> {
    const logger = createLogger('CheckoffTodoClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const todoId = args?.id || args?.todoId
      if (!todoId) {
        this.setState(ClientToolCallState.error)
        await this.markToolComplete(400, 'Missing todo id')
        return
      }

      try {
        const { useCopilotStore } = await import('@/stores/panel/copilot/store')
        const store = useCopilotStore.getState()
        if (store.updatePlanTodoStatus) {
          store.updatePlanTodoStatus(todoId, 'completed')
        }
      } catch (e) {
        logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
      }

      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, 'Todo checked off', { todoId })
      this.setState(ClientToolCallState.success)
    } catch (e: any) {
      logger.error('execute failed', { message: e?.message })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, e?.message || 'Failed to check off todo')
    }
  }
}
@@ -1,52 +0,0 @@
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class CrawlWebsiteClientTool extends BaseClientTool {
  static readonly id = 'crawl_website'

  constructor(toolCallId: string) {
    super(toolCallId, CrawlWebsiteClientTool.id, CrawlWebsiteClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Crawling website', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Crawling website', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Crawling website', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Crawled website', icon: Globe },
      [ClientToolCallState.error]: { text: 'Failed to crawl website', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted crawling website', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped crawling website', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.url && typeof params.url === 'string') {
        const url = params.url

        switch (state) {
          case ClientToolCallState.success:
            return `Crawled ${url}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Crawling ${url}`
          case ClientToolCallState.error:
            return `Failed to crawl ${url}`
          case ClientToolCallState.aborted:
            return `Aborted crawling ${url}`
          case ClientToolCallState.rejected:
            return `Skipped crawling ${url}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,56 +0,0 @@
import { Loader2, Wrench, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface CustomToolArgs {
  instruction: string
}

/**
 * Custom tool that spawns a subagent to manage custom tools.
 * This tool auto-executes and the actual work is done by the custom_tool subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class CustomToolClientTool extends BaseClientTool {
  static readonly id = 'custom_tool'

  constructor(toolCallId: string) {
    super(toolCallId, CustomToolClientTool.id, CustomToolClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Managing custom tool', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Managing custom tool', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Managing custom tool', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Managed custom tool', icon: Wrench },
      [ClientToolCallState.error]: { text: 'Failed custom tool', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped custom tool', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted custom tool', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Managing custom tool',
        completedLabel: 'Custom tool managed',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the custom_tool tool.
   * This just marks the tool as executing - the actual custom tool work is done server-side
   * by the custom_tool subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: CustomToolArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(CustomToolClientTool.id, CustomToolClientTool.metadata.uiConfig!)
@@ -1,60 +0,0 @@
import { Bug, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface DebugArgs {
  error_description: string
  context?: string
}

/**
 * Debug tool that spawns a subagent to diagnose workflow issues.
 * This tool auto-executes and the actual work is done by the debug subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class DebugClientTool extends BaseClientTool {
  static readonly id = 'debug'

  constructor(toolCallId: string) {
    super(toolCallId, DebugClientTool.id, DebugClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
      [ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped debug', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted debug', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Debugging',
        completedLabel: 'Debugged',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the debug tool.
   * This just marks the tool as executing - the actual debug work is done server-side
   * by the debug subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: DebugArgs): Promise<void> {
    // Immediately transition to executing state - no user confirmation needed
    this.setState(ClientToolCallState.executing)
    // The tool result will come from the server via tool_result event
    // when the debug subagent completes its work
  }
}

// Register UI config at module load
registerToolUIConfig(DebugClientTool.id, DebugClientTool.metadata.uiConfig!)
@@ -1,56 +0,0 @@
import { Loader2, Rocket, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface DeployArgs {
  instruction: string
}

/**
 * Deploy tool that spawns a subagent to handle deployment.
 * This tool auto-executes and the actual work is done by the deploy subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class DeployClientTool extends BaseClientTool {
  static readonly id = 'deploy'

  constructor(toolCallId: string) {
    super(toolCallId, DeployClientTool.id, DeployClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Deploying', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Deploying', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Deploying', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Deployed', icon: Rocket },
      [ClientToolCallState.error]: { text: 'Failed to deploy', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped deploy', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted deploy', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Deploying',
        completedLabel: 'Deployed',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the deploy tool.
   * This just marks the tool as executing - the actual deploy work is done server-side
   * by the deploy subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: DeployArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(DeployClientTool.id, DeployClientTool.metadata.uiConfig!)
@@ -1,61 +0,0 @@
import { Loader2, Pencil, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface EditArgs {
  instruction: string
}

/**
 * Edit tool that spawns a subagent to apply code/workflow edits.
 * This tool auto-executes and the actual work is done by the edit subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class EditClientTool extends BaseClientTool {
  static readonly id = 'edit'

  constructor(toolCallId: string) {
    super(toolCallId, EditClientTool.id, EditClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Editing', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Editing', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Editing', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Edited', icon: Pencil },
      [ClientToolCallState.error]: { text: 'Failed to apply edit', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped edit', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted edit', icon: XCircle },
    },
    uiConfig: {
      isSpecial: true,
      subagent: {
        streamingLabel: 'Editing',
        completedLabel: 'Edited',
        shouldCollapse: false, // Edit subagent stays expanded
        outputArtifacts: ['edit_summary'],
        hideThinkingText: true, // We show WorkflowEditSummary instead
      },
    },
  }

  /**
   * Execute the edit tool.
   * This just marks the tool as executing - the actual edit work is done server-side
   * by the edit subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: EditArgs): Promise<void> {
    // Immediately transition to executing state - no user confirmation needed
    this.setState(ClientToolCallState.executing)
    // The tool result will come from the server via tool_result event
    // when the edit subagent completes its work
  }
}

// Register UI config at module load
registerToolUIConfig(EditClientTool.id, EditClientTool.metadata.uiConfig!)
@@ -1,56 +0,0 @@
import { ClipboardCheck, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface EvaluateArgs {
  instruction: string
}

/**
 * Evaluate tool that spawns a subagent to evaluate workflows or outputs.
 * This tool auto-executes and the actual work is done by the evaluate subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class EvaluateClientTool extends BaseClientTool {
  static readonly id = 'evaluate'

  constructor(toolCallId: string) {
    super(toolCallId, EvaluateClientTool.id, EvaluateClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Evaluating', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Evaluating', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Evaluated', icon: ClipboardCheck },
      [ClientToolCallState.error]: { text: 'Failed to evaluate', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped evaluation', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted evaluation', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Evaluating',
        completedLabel: 'Evaluated',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the evaluate tool.
   * This just marks the tool as executing - the actual evaluation work is done server-side
   * by the evaluate subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: EvaluateArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(EvaluateClientTool.id, EvaluateClientTool.metadata.uiConfig!)
@@ -1,53 +0,0 @@
import { FileText, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class GetPageContentsClientTool extends BaseClientTool {
  static readonly id = 'get_page_contents'

  constructor(toolCallId: string) {
    super(toolCallId, GetPageContentsClientTool.id, GetPageContentsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Getting page contents', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Getting page contents', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Getting page contents', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Retrieved page contents', icon: FileText },
      [ClientToolCallState.error]: { text: 'Failed to get page contents', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted getting page contents', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped getting page contents', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.urls && Array.isArray(params.urls) && params.urls.length > 0) {
        const firstUrl = String(params.urls[0])
        const count = params.urls.length

        switch (state) {
          case ClientToolCallState.success:
            return count > 1 ? `Retrieved ${count} pages` : `Retrieved ${firstUrl}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return count > 1 ? `Getting ${count} pages` : `Getting ${firstUrl}`
          case ClientToolCallState.error:
            return count > 1 ? `Failed to get ${count} pages` : `Failed to get ${firstUrl}`
          case ClientToolCallState.aborted:
            return count > 1 ? `Aborted getting ${count} pages` : `Aborted getting ${firstUrl}`
          case ClientToolCallState.rejected:
            return count > 1 ? `Skipped getting ${count} pages` : `Skipped getting ${firstUrl}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,56 +0,0 @@
import { Info, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface InfoArgs {
  instruction: string
}

/**
 * Info tool that spawns a subagent to retrieve information.
 * This tool auto-executes and the actual work is done by the info subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class InfoClientTool extends BaseClientTool {
  static readonly id = 'info'

  constructor(toolCallId: string) {
    super(toolCallId, InfoClientTool.id, InfoClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Getting info', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Getting info', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Getting info', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Retrieved info', icon: Info },
      [ClientToolCallState.error]: { text: 'Failed to get info', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped info', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted info', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Getting info',
        completedLabel: 'Info retrieved',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the info tool.
   * This just marks the tool as executing - the actual info work is done server-side
   * by the info subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: InfoArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(InfoClientTool.id, InfoClientTool.metadata.uiConfig!)
@@ -1,56 +0,0 @@
import { BookOpen, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface KnowledgeArgs {
  instruction: string
}

/**
 * Knowledge tool that spawns a subagent to manage knowledge bases.
 * This tool auto-executes and the actual work is done by the knowledge subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class KnowledgeClientTool extends BaseClientTool {
  static readonly id = 'knowledge'

  constructor(toolCallId: string) {
    super(toolCallId, KnowledgeClientTool.id, KnowledgeClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Managing knowledge', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Managing knowledge', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Managing knowledge', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Managed knowledge', icon: BookOpen },
      [ClientToolCallState.error]: { text: 'Failed to manage knowledge', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped knowledge', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted knowledge', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Managing knowledge',
        completedLabel: 'Knowledge managed',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the knowledge tool.
   * This just marks the tool as executing - the actual knowledge search work is done server-side
   * by the knowledge subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: KnowledgeArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(KnowledgeClientTool.id, KnowledgeClientTool.metadata.uiConfig!)
@@ -1,127 +0,0 @@
import { createLogger } from '@sim/logger'
import { Globe2, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

interface MakeApiRequestArgs {
  url: string
  method: 'GET' | 'POST' | 'PUT'
  queryParams?: Record<string, string | number | boolean>
  headers?: Record<string, string>
  body?: any
}

export class MakeApiRequestClientTool extends BaseClientTool {
  static readonly id = 'make_api_request'

  constructor(toolCallId: string) {
    super(toolCallId, MakeApiRequestClientTool.id, MakeApiRequestClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Preparing API request', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Review API request', icon: Globe2 },
      [ClientToolCallState.executing]: { text: 'Executing API request', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Completed API request', icon: Globe2 },
      [ClientToolCallState.error]: { text: 'Failed to execute API request', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped API request', icon: MinusCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted API request', icon: XCircle },
    },
    interrupt: {
      accept: { text: 'Execute', icon: Globe2 },
      reject: { text: 'Skip', icon: MinusCircle },
    },
    uiConfig: {
      interrupt: {
        accept: { text: 'Execute', icon: Globe2 },
        reject: { text: 'Skip', icon: MinusCircle },
        showAllowOnce: true,
        showAllowAlways: true,
      },
      paramsTable: {
        columns: [
          { key: 'method', label: 'Method', width: '26%', editable: true, mono: true },
          { key: 'url', label: 'Endpoint', width: '74%', editable: true, mono: true },
        ],
        extractRows: (params) => {
          return [['request', (params.method || 'GET').toUpperCase(), params.url || '']]
        },
      },
    },
    getDynamicText: (params, state) => {
      if (params?.url && typeof params.url === 'string') {
        const method = params.method || 'GET'
        let url = params.url

        // Extract domain from URL for cleaner display
        try {
          const urlObj = new URL(url)
          url = urlObj.hostname + urlObj.pathname
        } catch {
          // Use URL as-is if parsing fails
        }

        switch (state) {
          case ClientToolCallState.success:
            return `${method} ${url} complete`
          case ClientToolCallState.executing:
            return `${method} ${url}`
          case ClientToolCallState.generating:
            return `Preparing ${method} ${url}`
          case ClientToolCallState.pending:
            return `Review ${method} ${url}`
          case ClientToolCallState.error:
            return `Failed ${method} ${url}`
          case ClientToolCallState.rejected:
            return `Skipped ${method} ${url}`
          case ClientToolCallState.aborted:
            return `Aborted ${method} ${url}`
        }
      }
      return undefined
    },
  }

  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  async handleAccept(args?: MakeApiRequestArgs): Promise<void> {
    const logger = createLogger('MakeApiRequestClientTool')
    try {
      this.setState(ClientToolCallState.executing)
      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'make_api_request', payload: args || {} }),
      })
      if (!res.ok) {
        const txt = await res.text().catch(() => '')
        throw new Error(txt || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, 'API request executed', parsed.result)
      this.setState(ClientToolCallState.success)
    } catch (e: any) {
      logger.error('execute failed', { message: e?.message })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, e?.message || 'API request failed')
    }
  }

  async execute(args?: MakeApiRequestArgs): Promise<void> {
    await this.handleAccept(args)
  }
}

// Register UI config at module load
registerToolUIConfig(MakeApiRequestClientTool.id, MakeApiRequestClientTool.metadata.uiConfig!)
@@ -1,64 +0,0 @@
import { createLogger } from '@sim/logger'
import { Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

interface MarkTodoInProgressArgs {
  id?: string
  todoId?: string
}

export class MarkTodoInProgressClientTool extends BaseClientTool {
  static readonly id = 'mark_todo_in_progress'

  constructor(toolCallId: string) {
    super(toolCallId, MarkTodoInProgressClientTool.id, MarkTodoInProgressClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Marking todo in progress', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Marking todo in progress', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Marking todo in progress', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Marked todo in progress', icon: Loader2 },
      [ClientToolCallState.error]: { text: 'Failed to mark in progress', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted marking in progress', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped marking in progress', icon: MinusCircle },
    },
  }

  async execute(args?: MarkTodoInProgressArgs): Promise<void> {
    const logger = createLogger('MarkTodoInProgressClientTool')
    try {
      this.setState(ClientToolCallState.executing)

      const todoId = args?.id || args?.todoId
      if (!todoId) {
        this.setState(ClientToolCallState.error)
        await this.markToolComplete(400, 'Missing todo id')
        return
      }

      try {
        const { useCopilotStore } = await import('@/stores/panel/copilot/store')
        const store = useCopilotStore.getState()
        if (store.updatePlanTodoStatus) {
          store.updatePlanTodoStatus(todoId, 'executing')
        }
      } catch (e) {
        logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
      }

      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, 'Todo marked in progress', { todoId })
      this.setState(ClientToolCallState.success)
    } catch (e: any) {
      logger.error('execute failed', { message: e?.message })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, e?.message || 'Failed to mark todo in progress')
    }
  }
}
@@ -1,174 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { CheckCircle, Loader2, MinusCircle, PlugZap, X, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { OAUTH_PROVIDERS, type OAuthServiceConfig } from '@/lib/oauth'
|
|
||||||
|
|
||||||
const logger = createLogger('OAuthRequestAccessClientTool')
|
|
||||||
|
|
||||||
interface OAuthRequestAccessArgs {
|
|
||||||
providerName?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ResolvedServiceInfo {
|
|
||||||
serviceId: string
|
|
||||||
providerId: string
|
|
||||||
service: OAuthServiceConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Finds the service configuration from a provider name.
|
|
||||||
* The providerName should match the exact `name` field returned by get_credentials tool's notConnected services.
|
|
||||||
*/
|
|
||||||
function findServiceByName(providerName: string): ResolvedServiceInfo | null {
|
|
||||||
const normalizedName = providerName.toLowerCase().trim()
|
|
||||||
|
|
||||||
// First pass: exact match (case-insensitive)
|
|
||||||
for (const [, providerConfig] of Object.entries(OAUTH_PROVIDERS)) {
|
|
||||||
for (const [serviceId, service] of Object.entries(providerConfig.services)) {
|
|
||||||
if (service.name.toLowerCase() === normalizedName) {
|
|
||||||
return { serviceId, providerId: service.providerId, service }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Second pass: partial match as fallback for flexibility
|
|
||||||
for (const [, providerConfig] of Object.entries(OAUTH_PROVIDERS)) {
|
|
||||||
for (const [serviceId, service] of Object.entries(providerConfig.services)) {
|
|
||||||
if (
|
|
||||||
service.name.toLowerCase().includes(normalizedName) ||
|
|
||||||
normalizedName.includes(service.name.toLowerCase())
|
|
||||||
) {
|
|
||||||
return { serviceId, providerId: service.providerId, service }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface OAuthConnectEventDetail {
|
|
||||||
providerName: string
|
|
||||||
serviceId: string
|
|
||||||
providerId: string
|
|
||||||
requiredScopes: string[]
|
|
||||||
newScopes?: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
export class OAuthRequestAccessClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'oauth_request_access'
|
|
||||||
|
|
||||||
private providerName?: string
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, OAuthRequestAccessClientTool.id, OAuthRequestAccessClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Requesting integration access', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Requesting integration access', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Requesting integration access', icon: Loader2 },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped integration access', icon: MinusCircle },
|
|
      [ClientToolCallState.success]: { text: 'Requested integration access', icon: CheckCircle },
      [ClientToolCallState.error]: { text: 'Failed to request integration access', icon: X },
      [ClientToolCallState.aborted]: { text: 'Aborted integration access request', icon: XCircle },
    },
    interrupt: {
      accept: { text: 'Connect', icon: PlugZap },
      reject: { text: 'Skip', icon: MinusCircle },
    },
    getDynamicText: (params, state) => {
      if (params.providerName) {
        const name = params.providerName
        switch (state) {
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
          case ClientToolCallState.executing:
            return `Requesting ${name} access`
          case ClientToolCallState.rejected:
            return `Skipped ${name} access`
          case ClientToolCallState.success:
            return `Requested ${name} access`
          case ClientToolCallState.error:
            return `Failed to request ${name} access`
          case ClientToolCallState.aborted:
            return `Aborted ${name} access request`
        }
      }
      return undefined
    },
  }

  async handleAccept(args?: OAuthRequestAccessArgs): Promise<void> {
    try {
      if (args?.providerName) {
        this.providerName = args.providerName
      }

      if (!this.providerName) {
        logger.error('No provider name provided')
        this.setState(ClientToolCallState.error)
        await this.markToolComplete(400, 'No provider name specified')
        return
      }

      // Find the service by name
      const serviceInfo = findServiceByName(this.providerName)
      if (!serviceInfo) {
        logger.error('Could not find OAuth service for provider', {
          providerName: this.providerName,
        })
        this.setState(ClientToolCallState.error)
        await this.markToolComplete(400, `Unknown provider: ${this.providerName}`)
        return
      }

      const { serviceId, providerId, service } = serviceInfo

      logger.info('Opening OAuth connect modal', {
        providerName: this.providerName,
        serviceId,
        providerId,
      })

      // Move to executing state
      this.setState(ClientToolCallState.executing)

      // Dispatch event to open the OAuth modal (same pattern as open-settings)
      window.dispatchEvent(
        new CustomEvent<OAuthConnectEventDetail>('open-oauth-connect', {
          detail: {
            providerName: this.providerName,
            serviceId,
            providerId,
            requiredScopes: service.scopes || [],
          },
        })
      )

      // Mark as success - the user opened the prompt, but connection is not guaranteed
      this.setState(ClientToolCallState.success)
      await this.markToolComplete(
        200,
        `The user opened the ${this.providerName} connection prompt and may have connected. Check the connected integrations to verify the connection status.`
      )
    } catch (e) {
      logger.error('Failed to open OAuth connect modal', { error: e })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, 'Failed to open OAuth connection dialog')
    }
  }

  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  async execute(args?: OAuthRequestAccessArgs): Promise<void> {
    await this.handleAccept(args)
  }
}
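For context, a minimal sketch of the listener side of this handshake. The consumer and the `openConnectDialog` helper are hypothetical, but the event name and the `detail` shape mirror the dispatch in `handleAccept` above.

// Hypothetical consumer of the 'open-oauth-connect' event dispatched above.
// The detail shape is inferred from the CustomEvent payload in handleAccept.
interface OAuthConnectEventDetail {
  providerName: string
  serviceId: string
  providerId: string
  requiredScopes: string[]
}

// Stand-in for whatever UI actually opens the connect dialog in the app.
function openConnectDialog(detail: OAuthConnectEventDetail): void {
  console.log('Opening OAuth connect dialog for', detail.providerName, detail.requiredScopes)
}

window.addEventListener('open-oauth-connect', (event) => {
  const { detail } = event as CustomEvent<OAuthConnectEventDetail>
  openConnectDialog(detail)
})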
@@ -1,59 +0,0 @@
import { ListTodo, Loader2, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface PlanArgs {
  request: string
}

/**
 * Plan tool that spawns a subagent to plan an approach.
 * This tool auto-executes and the actual work is done by the plan subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class PlanClientTool extends BaseClientTool {
  static readonly id = 'plan'

  constructor(toolCallId: string) {
    super(toolCallId, PlanClientTool.id, PlanClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Planning', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Planning', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Planning', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Planned', icon: ListTodo },
      [ClientToolCallState.error]: { text: 'Failed to plan', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped plan', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted plan', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Planning',
        completedLabel: 'Planned',
        shouldCollapse: true,
        outputArtifacts: ['plan'],
      },
    },
  }

  /**
   * Execute the plan tool.
   * This just marks the tool as executing - the actual planning work is done server-side
   * by the plan subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: PlanArgs): Promise<void> {
    // Immediately transition to executing state - no user confirmation needed
    this.setState(ClientToolCallState.executing)
    // The tool result will come from the server via tool_result event
    // when the plan subagent completes its work
  }
}

// Register UI config at module load
registerToolUIConfig(PlanClientTool.id, PlanClientTool.metadata.uiConfig!)
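A rough usage sketch for orientation only: the dispatcher function is an assumption (and the import of PlanClientTool is omitted, since its file path is not shown in this diff), but the constructor and `execute` signatures match the class above.

// Hypothetical call site: a dispatcher handling a streamed 'plan' tool call.
// (Import of PlanClientTool omitted; its module path is not shown in this diff.)
async function handlePlanToolCall(toolCallId: string, args: { request: string }): Promise<void> {
  const tool = new PlanClientTool(toolCallId)
  // execute() only flips local state to `executing`; the plan subagent's output
  // streams in separately and the final result arrives via a tool_result event.
  await tool.execute(args)
}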
@@ -1,76 +0,0 @@
import { CheckCircle2, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class RememberDebugClientTool extends BaseClientTool {
  static readonly id = 'remember_debug'

  constructor(toolCallId: string) {
    super(toolCallId, RememberDebugClientTool.id, RememberDebugClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Validating fix', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Validating fix', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Validating fix', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Validated fix', icon: CheckCircle2 },
      [ClientToolCallState.error]: { text: 'Failed to validate', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted validation', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped validation', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      const operation = params?.operation

      if (operation === 'add' || operation === 'edit') {
        // For add/edit, show from problem or solution
        const text = params?.problem || params?.solution
        if (text && typeof text === 'string') {
          switch (state) {
            case ClientToolCallState.success:
              return `Validated fix ${text}`
            case ClientToolCallState.executing:
            case ClientToolCallState.generating:
            case ClientToolCallState.pending:
              return `Validating fix ${text}`
            case ClientToolCallState.error:
              return `Failed to validate fix ${text}`
            case ClientToolCallState.aborted:
              return `Aborted validating fix ${text}`
            case ClientToolCallState.rejected:
              return `Skipped validating fix ${text}`
          }
        }
      } else if (operation === 'delete') {
        // For delete, show from problem or solution (or id as fallback)
        const text = params?.problem || params?.solution || params?.id
        if (text && typeof text === 'string') {
          switch (state) {
            case ClientToolCallState.success:
              return `Adjusted fix ${text}`
            case ClientToolCallState.executing:
            case ClientToolCallState.generating:
            case ClientToolCallState.pending:
              return `Adjusting fix ${text}`
            case ClientToolCallState.error:
              return `Failed to adjust fix ${text}`
            case ClientToolCallState.aborted:
              return `Aborted adjusting fix ${text}`
            case ClientToolCallState.rejected:
              return `Skipped adjusting fix ${text}`
          }
        }
      }

      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,56 +0,0 @@
import { Loader2, Search, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

interface ResearchArgs {
  instruction: string
}

/**
 * Research tool that spawns a subagent to research information.
 * This tool auto-executes and the actual work is done by the research subagent.
 * The subagent's output is streamed as nested content under this tool call.
 */
export class ResearchClientTool extends BaseClientTool {
  static readonly id = 'research'

  constructor(toolCallId: string) {
    super(toolCallId, ResearchClientTool.id, ResearchClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Researching', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Researching', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Researching', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Researched', icon: Search },
      [ClientToolCallState.error]: { text: 'Failed to research', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped research', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted research', icon: XCircle },
    },
    uiConfig: {
      subagent: {
        streamingLabel: 'Researching',
        completedLabel: 'Researched',
        shouldCollapse: true,
        outputArtifacts: [],
      },
    },
  }

  /**
   * Execute the research tool.
   * This just marks the tool as executing - the actual research work is done server-side
   * by the research subagent, and its output is streamed as subagent events.
   */
  async execute(_args?: ResearchArgs): Promise<void> {
    this.setState(ClientToolCallState.executing)
  }
}

// Register UI config at module load
registerToolUIConfig(ResearchClientTool.id, ResearchClientTool.metadata.uiConfig!)
@@ -1,52 +0,0 @@
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class ScrapePageClientTool extends BaseClientTool {
  static readonly id = 'scrape_page'

  constructor(toolCallId: string) {
    super(toolCallId, ScrapePageClientTool.id, ScrapePageClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Scraping page', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Scraping page', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Scraping page', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Scraped page', icon: Globe },
      [ClientToolCallState.error]: { text: 'Failed to scrape page', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted scraping page', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped scraping page', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.url && typeof params.url === 'string') {
        const url = params.url

        switch (state) {
          case ClientToolCallState.success:
            return `Scraped ${url}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Scraping ${url}`
          case ClientToolCallState.error:
            return `Failed to scrape ${url}`
          case ClientToolCallState.aborted:
            return `Aborted scraping ${url}`
          case ClientToolCallState.rejected:
            return `Skipped scraping ${url}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
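The same metadata contract — static `displayNames` plus an optional `getDynamicText` override — recurs in every tool in this diff. A minimal sketch of how a renderer might resolve the label for a given state (the types and the helper are illustrative assumptions, not the actual UI code):

import type { LucideIcon } from 'lucide-react'

// Shapes inferred from the metadata objects in this diff; names are illustrative.
interface DisplayEntry {
  text: string
  icon: LucideIcon
}
interface ToolDisplayMetadata {
  displayNames: Record<string, DisplayEntry>
  getDynamicText?: (params: Record<string, unknown>, state: string) => string | undefined
}

// Prefer the parameter-aware dynamic text, fall back to the static entry for the state.
function resolveLabel(
  metadata: ToolDisplayMetadata,
  state: string,
  params: Record<string, unknown>
): DisplayEntry | undefined {
  const entry = metadata.displayNames[state]
  if (!entry) return undefined
  const dynamicText = metadata.getDynamicText?.(params, state)
  return dynamicText ? { ...entry, text: dynamicText } : entry
}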
@@ -1,80 +0,0 @@
import { createLogger } from '@sim/logger'
import { BookOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

interface SearchDocumentationArgs {
  query: string
  topK?: number
  threshold?: number
}

export class SearchDocumentationClientTool extends BaseClientTool {
  static readonly id = 'search_documentation'

  constructor(toolCallId: string) {
    super(toolCallId, SearchDocumentationClientTool.id, SearchDocumentationClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Searching documentation', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Searching documentation', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Searching documentation', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Completed documentation search', icon: BookOpen },
      [ClientToolCallState.error]: { text: 'Failed to search docs', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted documentation search', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped documentation search', icon: MinusCircle },
    },
    getDynamicText: (params, state) => {
      if (params?.query && typeof params.query === 'string') {
        const query = params.query

        switch (state) {
          case ClientToolCallState.success:
            return `Searched docs for ${query}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Searching docs for ${query}`
          case ClientToolCallState.error:
            return `Failed to search docs for ${query}`
          case ClientToolCallState.aborted:
            return `Aborted searching docs for ${query}`
          case ClientToolCallState.rejected:
            return `Skipped searching docs for ${query}`
        }
      }
      return undefined
    },
  }

  async execute(args?: SearchDocumentationArgs): Promise<void> {
    const logger = createLogger('SearchDocumentationClientTool')
    try {
      this.setState(ClientToolCallState.executing)
      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'search_documentation', payload: args || {} }),
      })
      if (!res.ok) {
        const txt = await res.text().catch(() => '')
        throw new Error(txt || `Server error (${res.status})`)
      }
      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      this.setState(ClientToolCallState.success)
      await this.markToolComplete(200, 'Documentation search complete', parsed.result)
      this.setState(ClientToolCallState.success)
    } catch (e: any) {
      logger.error('execute failed', { message: e?.message })
      this.setState(ClientToolCallState.error)
      await this.markToolComplete(500, e?.message || 'Documentation search failed')
    }
  }
}
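Unlike the display-only tools above, this one performs a real round trip: it POSTs `{ toolName, payload }` to `/api/copilot/execute-copilot-server-tool`, validates the response against `ExecuteResponseSuccessSchema`, and reports the parsed result via `markToolComplete`. A hypothetical invocation, with the dispatcher function assumed and the class import omitted (its file path is not shown in this diff):

// Hypothetical call site; the tool call id and args come from a streamed tool_call event.
// (Import of SearchDocumentationClientTool omitted; its module path is not shown here.)
async function handleSearchDocsToolCall(
  toolCallId: string,
  args: { query: string; topK?: number; threshold?: number }
): Promise<void> {
  const tool = new SearchDocumentationClientTool(toolCallId)
  await tool.execute(args)
}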
@@ -1,52 +0,0 @@
import { Bug, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SearchErrorsClientTool extends BaseClientTool {
  static readonly id = 'search_errors'

  constructor(toolCallId: string) {
    super(toolCallId, SearchErrorsClientTool.id, SearchErrorsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
      [ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted debugging', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped debugging', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.query && typeof params.query === 'string') {
        const query = params.query

        switch (state) {
          case ClientToolCallState.success:
            return `Debugged ${query}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Debugging ${query}`
          case ClientToolCallState.error:
            return `Failed to debug ${query}`
          case ClientToolCallState.aborted:
            return `Aborted debugging ${query}`
          case ClientToolCallState.rejected:
            return `Skipped debugging ${query}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,50 +0,0 @@
import { BookOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SearchLibraryDocsClientTool extends BaseClientTool {
  static readonly id = 'search_library_docs'

  constructor(toolCallId: string) {
    super(toolCallId, SearchLibraryDocsClientTool.id, SearchLibraryDocsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Reading docs', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Reading docs', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Reading docs', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Read docs', icon: BookOpen },
      [ClientToolCallState.error]: { text: 'Failed to read docs', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted reading docs', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped reading docs', icon: MinusCircle },
    },
    getDynamicText: (params, state) => {
      const libraryName = params?.library_name
      if (libraryName && typeof libraryName === 'string') {
        switch (state) {
          case ClientToolCallState.success:
            return `Read ${libraryName} docs`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Reading ${libraryName} docs`
          case ClientToolCallState.error:
            return `Failed to read ${libraryName} docs`
          case ClientToolCallState.aborted:
            return `Aborted reading ${libraryName} docs`
          case ClientToolCallState.rejected:
            return `Skipped reading ${libraryName} docs`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,52 +0,0 @@
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SearchOnlineClientTool extends BaseClientTool {
  static readonly id = 'search_online'

  constructor(toolCallId: string) {
    super(toolCallId, SearchOnlineClientTool.id, SearchOnlineClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Searching online', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Searching online', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Searching online', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Completed online search', icon: Globe },
      [ClientToolCallState.error]: { text: 'Failed to search online', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped online search', icon: MinusCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted online search', icon: XCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.query && typeof params.query === 'string') {
        const query = params.query

        switch (state) {
          case ClientToolCallState.success:
            return `Searched online for ${query}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Searching online for ${query}`
          case ClientToolCallState.error:
            return `Failed to search online for ${query}`
          case ClientToolCallState.aborted:
            return `Aborted searching online for ${query}`
          case ClientToolCallState.rejected:
            return `Skipped searching online for ${query}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
@@ -1,52 +0,0 @@
import { Loader2, MinusCircle, Search, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'

export class SearchPatternsClientTool extends BaseClientTool {
  static readonly id = 'search_patterns'

  constructor(toolCallId: string) {
    super(toolCallId, SearchPatternsClientTool.id, SearchPatternsClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Searching workflow patterns', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Searching workflow patterns', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Searching workflow patterns', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Found workflow patterns', icon: Search },
      [ClientToolCallState.error]: { text: 'Failed to search patterns', icon: XCircle },
      [ClientToolCallState.aborted]: { text: 'Aborted pattern search', icon: MinusCircle },
      [ClientToolCallState.rejected]: { text: 'Skipped pattern search', icon: MinusCircle },
    },
    interrupt: undefined,
    getDynamicText: (params, state) => {
      if (params?.queries && Array.isArray(params.queries) && params.queries.length > 0) {
        const firstQuery = String(params.queries[0])

        switch (state) {
          case ClientToolCallState.success:
            return `Searched ${firstQuery}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Searching ${firstQuery}`
          case ClientToolCallState.error:
            return `Failed to search ${firstQuery}`
          case ClientToolCallState.aborted:
            return `Aborted searching ${firstQuery}`
          case ClientToolCallState.rejected:
            return `Skipped searching ${firstQuery}`
        }
      }
      return undefined
    },
  }

  async execute(): Promise<void> {
    return
  }
}
Some files were not shown because too many files have changed in this diff.