mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-29 08:48:02 -05:00
Compare commits
3 Commits
v0.5.75
...
feat/copil
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a6e2e4bdb9 | ||
|
|
f808fd6c69 | ||
|
|
cee74f8eb5 |
@@ -1,5 +1,5 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { copilotChats } from '@sim/db/schema'
|
import { copilotChats, workflow } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, desc, eq } from 'drizzle-orm'
|
import { and, desc, eq } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
@@ -16,6 +16,10 @@ import {
|
|||||||
createRequestTracker,
|
createRequestTracker,
|
||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import {
|
||||||
|
handleToolCallEvent,
|
||||||
|
registerServerHandledTool,
|
||||||
|
} from '@/lib/copilot/server-executor/stream-handler'
|
||||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
||||||
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
@@ -36,11 +40,24 @@ const FileAttachmentSchema = z.object({
|
|||||||
size: z.number(),
|
size: z.number(),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session context for headless mode.
|
||||||
|
* In headless mode, workflowId may not be known at start.
|
||||||
|
* The set_context tool can be used to establish context mid-conversation.
|
||||||
|
*/
|
||||||
|
const SessionContextSchema = z.object({
|
||||||
|
workflowId: z.string().optional(),
|
||||||
|
workspaceId: z.string().optional(),
|
||||||
|
})
|
||||||
|
|
||||||
const ChatMessageSchema = z.object({
|
const ChatMessageSchema = z.object({
|
||||||
message: z.string().min(1, 'Message is required'),
|
message: z.string().min(1, 'Message is required'),
|
||||||
userMessageId: z.string().optional(), // ID from frontend for the user message
|
userMessageId: z.string().optional(), // ID from frontend for the user message
|
||||||
chatId: z.string().optional(),
|
chatId: z.string().optional(),
|
||||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
// workflowId is optional for headless mode - can be set via set_context tool
|
||||||
|
workflowId: z.string().optional(),
|
||||||
|
// Session context for headless mode - provides initial context that can be updated via set_context
|
||||||
|
sessionContext: SessionContextSchema.optional(),
|
||||||
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
|
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
|
||||||
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
||||||
prefetch: z.boolean().optional(),
|
prefetch: z.boolean().optional(),
|
||||||
@@ -101,6 +118,7 @@ export async function POST(req: NextRequest) {
|
|||||||
userMessageId,
|
userMessageId,
|
||||||
chatId,
|
chatId,
|
||||||
workflowId,
|
workflowId,
|
||||||
|
sessionContext,
|
||||||
model,
|
model,
|
||||||
mode,
|
mode,
|
||||||
prefetch,
|
prefetch,
|
||||||
@@ -113,6 +131,41 @@ export async function POST(req: NextRequest) {
|
|||||||
contexts,
|
contexts,
|
||||||
commands,
|
commands,
|
||||||
} = ChatMessageSchema.parse(body)
|
} = ChatMessageSchema.parse(body)
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Resolve execution context (workflowId, workspaceId)
|
||||||
|
// In client mode: workflowId comes from request, we look up workspaceId
|
||||||
|
// In headless mode: may start without workflowId, set via set_context tool
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
const resolvedWorkflowId = workflowId || sessionContext?.workflowId
|
||||||
|
let resolvedWorkspaceId = sessionContext?.workspaceId
|
||||||
|
|
||||||
|
// If we have a workflowId but no workspaceId, look it up once
|
||||||
|
if (resolvedWorkflowId && !resolvedWorkspaceId) {
|
||||||
|
try {
|
||||||
|
const [wf] = await db
|
||||||
|
.select({ workspaceId: workflow.workspaceId })
|
||||||
|
.from(workflow)
|
||||||
|
.where(eq(workflow.id, resolvedWorkflowId))
|
||||||
|
.limit(1)
|
||||||
|
resolvedWorkspaceId = wf?.workspaceId ?? undefined
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${tracker.requestId}] Failed to lookup workspaceId for workflow`, {
|
||||||
|
workflowId: resolvedWorkflowId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build execution context that will be passed to Go and used for tool execution
|
||||||
|
const executionContext = {
|
||||||
|
userId: authenticatedUserId,
|
||||||
|
workflowId: resolvedWorkflowId,
|
||||||
|
workspaceId: resolvedWorkspaceId,
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug(`[${tracker.requestId}] Resolved execution context`, executionContext)
|
||||||
|
|
||||||
// Ensure we have a consistent user message ID for this request
|
// Ensure we have a consistent user message ID for this request
|
||||||
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
||||||
try {
|
try {
|
||||||
@@ -427,7 +480,7 @@ export async function POST(req: NextRequest) {
|
|||||||
|
|
||||||
const requestPayload = {
|
const requestPayload = {
|
||||||
message: message, // Just send the current user message text
|
message: message, // Just send the current user message text
|
||||||
workflowId,
|
workflowId: resolvedWorkflowId,
|
||||||
userId: authenticatedUserId,
|
userId: authenticatedUserId,
|
||||||
stream: stream,
|
stream: stream,
|
||||||
streamToolCalls: true,
|
streamToolCalls: true,
|
||||||
@@ -435,6 +488,9 @@ export async function POST(req: NextRequest) {
|
|||||||
mode: transportMode,
|
mode: transportMode,
|
||||||
messageId: userMessageIdToUse,
|
messageId: userMessageIdToUse,
|
||||||
version: SIM_AGENT_VERSION,
|
version: SIM_AGENT_VERSION,
|
||||||
|
// Execution context for Go to maintain and echo back in tool_call events
|
||||||
|
// This enables headless mode where context can be set dynamically via set_context tool
|
||||||
|
executionContext,
|
||||||
...(providerConfig ? { provider: providerConfig } : {}),
|
...(providerConfig ? { provider: providerConfig } : {}),
|
||||||
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
|
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
|
||||||
...(typeof prefetch === 'boolean' ? { prefetch: prefetch } : {}),
|
...(typeof prefetch === 'boolean' ? { prefetch: prefetch } : {}),
|
||||||
@@ -618,6 +674,39 @@ export async function POST(req: NextRequest) {
|
|||||||
toolCalls.push(event.data)
|
toolCalls.push(event.data)
|
||||||
if (event.data?.id) {
|
if (event.data?.id) {
|
||||||
announcedToolCallIds.add(event.data.id)
|
announcedToolCallIds.add(event.data.id)
|
||||||
|
|
||||||
|
// Execute server-side tools automatically
|
||||||
|
// This runs async and calls mark-complete when done
|
||||||
|
// Use context from Go's event.data.executionContext if provided,
|
||||||
|
// falling back to the initial resolved context
|
||||||
|
const toolContext = {
|
||||||
|
userId: authenticatedUserId,
|
||||||
|
workflowId:
|
||||||
|
event.data.executionContext?.workflowId || resolvedWorkflowId,
|
||||||
|
workspaceId:
|
||||||
|
event.data.executionContext?.workspaceId || resolvedWorkspaceId,
|
||||||
|
chatId: actualChatId,
|
||||||
|
}
|
||||||
|
handleToolCallEvent(
|
||||||
|
{
|
||||||
|
id: event.data.id,
|
||||||
|
name: event.data.name,
|
||||||
|
arguments: event.data.arguments || {},
|
||||||
|
partial: false,
|
||||||
|
},
|
||||||
|
toolContext
|
||||||
|
).then((handledServerSide) => {
|
||||||
|
if (handledServerSide) {
|
||||||
|
registerServerHandledTool(event.data.id, event.data.name)
|
||||||
|
logger.info(
|
||||||
|
`[${tracker.requestId}] Tool will be executed server-side`,
|
||||||
|
{
|
||||||
|
toolCallId: event.data.id,
|
||||||
|
toolName: event.data.name,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
break
|
break
|
||||||
|
|||||||
186
apps/sim/app/api/copilot/test/route.ts
Normal file
186
apps/sim/app/api/copilot/test/route.ts
Normal file
@@ -0,0 +1,186 @@
|
|||||||
|
/**
|
||||||
|
* POST /api/copilot/test
|
||||||
|
*
|
||||||
|
* Simple test endpoint for copilot without authentication.
|
||||||
|
* Pass just a query and optional userId to test headless mode.
|
||||||
|
*
|
||||||
|
* Request body:
|
||||||
|
* {
|
||||||
|
* query: string, // Required - the message to send
|
||||||
|
* userId?: string, // Optional - defaults to 'test-user'
|
||||||
|
* workflowId?: string, // Optional - workflow context
|
||||||
|
* workspaceId?: string, // Optional - workspace context
|
||||||
|
* stream?: boolean, // Optional - defaults to true
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||||
|
import {
|
||||||
|
handleToolCallEvent,
|
||||||
|
registerServerHandledTool,
|
||||||
|
} from '@/lib/copilot/server-executor/stream-handler'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotTestAPI')
|
||||||
|
|
||||||
|
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||||
|
|
||||||
|
const TestRequestSchema = z.object({
|
||||||
|
query: z.string().min(1, 'Query is required'),
|
||||||
|
userId: z.string().optional().default('test-user'),
|
||||||
|
workflowId: z.string().optional(),
|
||||||
|
workspaceId: z.string().optional(),
|
||||||
|
stream: z.boolean().optional().default(true),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(req: NextRequest) {
|
||||||
|
try {
|
||||||
|
const body = await req.json()
|
||||||
|
const { query, userId, workflowId, workspaceId, stream } = TestRequestSchema.parse(body)
|
||||||
|
|
||||||
|
logger.info('Test copilot request', { query, userId, workflowId, workspaceId, stream })
|
||||||
|
|
||||||
|
// Build execution context
|
||||||
|
const executionContext = {
|
||||||
|
userId,
|
||||||
|
workflowId,
|
||||||
|
workspaceId,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build request payload for Go copilot
|
||||||
|
const requestPayload = {
|
||||||
|
message: query,
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
stream: stream,
|
||||||
|
streamToolCalls: true,
|
||||||
|
model: 'claude-sonnet-4-20250514',
|
||||||
|
mode: 'agent',
|
||||||
|
messageId: crypto.randomUUID(),
|
||||||
|
version: SIM_AGENT_VERSION,
|
||||||
|
executionContext,
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Sending to Go copilot', { url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming` })
|
||||||
|
|
||||||
|
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify(requestPayload),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!simAgentResponse.ok) {
|
||||||
|
const errorText = await simAgentResponse.text().catch(() => '')
|
||||||
|
logger.error('Go copilot error', { status: simAgentResponse.status, error: errorText })
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: `Copilot error: ${simAgentResponse.statusText}`, details: errorText },
|
||||||
|
{ status: simAgentResponse.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (stream && simAgentResponse.body) {
|
||||||
|
// Create streaming response
|
||||||
|
const transformedStream = new ReadableStream({
|
||||||
|
async start(controller) {
|
||||||
|
const encoder = new TextEncoder()
|
||||||
|
const reader = simAgentResponse.body!.getReader()
|
||||||
|
const decoder = new TextDecoder()
|
||||||
|
let buffer = ''
|
||||||
|
|
||||||
|
try {
|
||||||
|
while (true) {
|
||||||
|
const { done, value } = await reader.read()
|
||||||
|
if (done) break
|
||||||
|
|
||||||
|
const chunk = decoder.decode(value, { stream: true })
|
||||||
|
buffer += chunk
|
||||||
|
|
||||||
|
const lines = buffer.split('\n')
|
||||||
|
buffer = lines.pop() || ''
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.trim() === '') continue
|
||||||
|
|
||||||
|
if (line.startsWith('data: ') && line.length > 6) {
|
||||||
|
try {
|
||||||
|
const jsonStr = line.slice(6)
|
||||||
|
const event = JSON.parse(jsonStr)
|
||||||
|
|
||||||
|
// Handle tool calls server-side
|
||||||
|
if (event.type === 'tool_call' && !event.data?.partial && event.data?.id) {
|
||||||
|
const toolContext = {
|
||||||
|
userId,
|
||||||
|
workflowId: event.data.executionContext?.workflowId || workflowId,
|
||||||
|
workspaceId: event.data.executionContext?.workspaceId || workspaceId,
|
||||||
|
chatId: undefined,
|
||||||
|
}
|
||||||
|
|
||||||
|
handleToolCallEvent(
|
||||||
|
{
|
||||||
|
id: event.data.id,
|
||||||
|
name: event.data.name,
|
||||||
|
arguments: event.data.arguments || {},
|
||||||
|
partial: false,
|
||||||
|
},
|
||||||
|
toolContext
|
||||||
|
).then((handledServerSide) => {
|
||||||
|
if (handledServerSide) {
|
||||||
|
registerServerHandledTool(event.data.id, event.data.name)
|
||||||
|
logger.info('Tool executed server-side', {
|
||||||
|
toolCallId: event.data.id,
|
||||||
|
toolName: event.data.name,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Ignore parse errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Forward all events to client
|
||||||
|
controller.enqueue(encoder.encode(line + '\n'))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle remaining buffer
|
||||||
|
if (buffer.trim()) {
|
||||||
|
controller.enqueue(encoder.encode(buffer + '\n'))
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Stream error', { error })
|
||||||
|
} finally {
|
||||||
|
controller.close()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
return new Response(transformedStream, {
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'text/event-stream',
|
||||||
|
'Cache-Control': 'no-cache',
|
||||||
|
Connection: 'keep-alive',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
// Non-streaming response
|
||||||
|
const text = await simAgentResponse.text()
|
||||||
|
return NextResponse.json({ response: text })
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Test endpoint error', { error })
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 })
|
||||||
|
}
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Internal error', message: error instanceof Error ? error.message : String(error) },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import {
|
||||||
|
authenticateCopilotRequestSessionOnly,
|
||||||
|
createUnauthorizedResponse,
|
||||||
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import { getToolExecutionState } from '@/lib/copilot/server-executor/stream-handler'
|
||||||
|
|
||||||
|
const logger = createLogger('ToolExecutionStateAPI')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/copilot/tools/execution-state/[toolCallId]
|
||||||
|
*
|
||||||
|
* Returns the execution state of a tool call.
|
||||||
|
* Useful for client reconnection scenarios.
|
||||||
|
*/
|
||||||
|
export async function GET(
|
||||||
|
req: NextRequest,
|
||||||
|
{ params }: { params: Promise<{ toolCallId: string }> }
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return createUnauthorizedResponse()
|
||||||
|
}
|
||||||
|
|
||||||
|
const { toolCallId } = await params
|
||||||
|
|
||||||
|
if (!toolCallId) {
|
||||||
|
return NextResponse.json({ error: 'Tool call ID is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const state = await getToolExecutionState(toolCallId)
|
||||||
|
|
||||||
|
if (!state) {
|
||||||
|
return NextResponse.json({ error: 'Tool call not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify the user owns this tool execution
|
||||||
|
if (state.userId !== userId) {
|
||||||
|
logger.warn("User attempted to access another user's tool execution", {
|
||||||
|
requestingUserId: userId,
|
||||||
|
ownerUserId: state.userId,
|
||||||
|
toolCallId,
|
||||||
|
})
|
||||||
|
return NextResponse.json({ error: 'Tool call not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
toolCallId: state.toolCallId,
|
||||||
|
toolName: state.toolName,
|
||||||
|
status: state.status,
|
||||||
|
startedAt: state.startedAt,
|
||||||
|
completedAt: state.completedAt,
|
||||||
|
result: state.result,
|
||||||
|
error: state.error,
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error fetching tool execution state', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
85
apps/sim/app/api/copilot/tools/execution-states/route.ts
Normal file
85
apps/sim/app/api/copilot/tools/execution-states/route.ts
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import {
|
||||||
|
authenticateCopilotRequestSessionOnly,
|
||||||
|
createBadRequestResponse,
|
||||||
|
createUnauthorizedResponse,
|
||||||
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import { getToolExecutionState } from '@/lib/copilot/server-executor/stream-handler'
|
||||||
|
|
||||||
|
const logger = createLogger('ToolExecutionStatesAPI')
|
||||||
|
|
||||||
|
const RequestSchema = z.object({
|
||||||
|
toolCallIds: z.array(z.string()).min(1).max(50),
|
||||||
|
})
|
||||||
|
|
||||||
|
/**
|
||||||
|
* POST /api/copilot/tools/execution-states
|
||||||
|
*
|
||||||
|
* Returns the execution states of multiple tool calls at once.
|
||||||
|
* Useful for efficient reconnection scenarios.
|
||||||
|
*/
|
||||||
|
export async function POST(req: NextRequest) {
|
||||||
|
try {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return createUnauthorizedResponse()
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await req.json()
|
||||||
|
const { toolCallIds } = RequestSchema.parse(body)
|
||||||
|
|
||||||
|
const states: Record<
|
||||||
|
string,
|
||||||
|
{
|
||||||
|
toolCallId: string
|
||||||
|
toolName: string
|
||||||
|
status: string
|
||||||
|
startedAt: number
|
||||||
|
completedAt?: number
|
||||||
|
result?: unknown
|
||||||
|
error?: string
|
||||||
|
} | null
|
||||||
|
> = {}
|
||||||
|
|
||||||
|
// Fetch all states in parallel
|
||||||
|
const results = await Promise.all(
|
||||||
|
toolCallIds.map(async (toolCallId) => {
|
||||||
|
const state = await getToolExecutionState(toolCallId)
|
||||||
|
// Filter out states that don't belong to this user
|
||||||
|
if (state && state.userId !== userId) {
|
||||||
|
return { toolCallId, state: null }
|
||||||
|
}
|
||||||
|
return { toolCallId, state }
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
for (const { toolCallId, state } of results) {
|
||||||
|
if (state) {
|
||||||
|
states[toolCallId] = {
|
||||||
|
toolCallId: state.toolCallId,
|
||||||
|
toolName: state.toolName,
|
||||||
|
status: state.status,
|
||||||
|
startedAt: state.startedAt,
|
||||||
|
completedAt: state.completedAt,
|
||||||
|
result: state.result,
|
||||||
|
error: state.error,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
states[toolCallId] = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return NextResponse.json({ states })
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return createBadRequestResponse('Invalid request body')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error('Error fetching tool execution states', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,20 +9,13 @@ import {
|
|||||||
createRequestTracker,
|
createRequestTracker,
|
||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
|
import { MarkCompletePayloadSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||||
|
|
||||||
const MarkCompleteSchema = z.object({
|
|
||||||
id: z.string(),
|
|
||||||
name: z.string(),
|
|
||||||
status: z.number().int(),
|
|
||||||
message: z.any().optional(),
|
|
||||||
data: z.any().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* POST /api/copilot/tools/mark-complete
|
* POST /api/copilot/tools/mark-complete
|
||||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
||||||
@@ -46,7 +39,7 @@ export async function POST(req: NextRequest) {
|
|||||||
})
|
})
|
||||||
} catch {}
|
} catch {}
|
||||||
|
|
||||||
const parsed = MarkCompleteSchema.parse(body)
|
const parsed = MarkCompletePayloadSchema.parse(body)
|
||||||
|
|
||||||
const messagePreview = (() => {
|
const messagePreview = (() => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
14
apps/sim/app/api/copilot/tools/server-executed/route.ts
Normal file
14
apps/sim/app/api/copilot/tools/server-executed/route.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import { NextResponse } from 'next/server'
|
||||||
|
import { SERVER_EXECUTED_TOOLS } from '@/lib/copilot/server-executor/registry'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/copilot/tools/server-executed
|
||||||
|
*
|
||||||
|
* Returns the list of tools that are executed server-side.
|
||||||
|
* Clients can use this to avoid double-executing these tools.
|
||||||
|
*/
|
||||||
|
export async function GET() {
|
||||||
|
return NextResponse.json({
|
||||||
|
tools: SERVER_EXECUTED_TOOLS,
|
||||||
|
})
|
||||||
|
}
|
||||||
162
apps/sim/lib/copilot/server-executed-tools.ts
Normal file
162
apps/sim/lib/copilot/server-executed-tools.ts
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
/**
|
||||||
|
* Client-side utilities for server-executed tools.
|
||||||
|
*
|
||||||
|
* This module helps the client know which tools are executed server-side
|
||||||
|
* to avoid double-execution.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
|
||||||
|
const logger = createLogger('ServerExecutedTools')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List of tools that are executed server-side.
|
||||||
|
* This is cached after the first fetch.
|
||||||
|
*/
|
||||||
|
let cachedServerExecutedTools: Set<string> | null = null
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tools currently being executed server-side.
|
||||||
|
* Maps toolCallId to tool info.
|
||||||
|
*/
|
||||||
|
const serverHandledToolCalls = new Map<
|
||||||
|
string,
|
||||||
|
{
|
||||||
|
toolName: string
|
||||||
|
startedAt: number
|
||||||
|
}
|
||||||
|
>()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch the list of server-executed tools from the API.
|
||||||
|
* Results are cached for the session.
|
||||||
|
*/
|
||||||
|
export async function fetchServerExecutedTools(): Promise<Set<string>> {
|
||||||
|
if (cachedServerExecutedTools) {
|
||||||
|
return cachedServerExecutedTools
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch('/api/copilot/tools/server-executed')
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`HTTP ${response.status}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as { tools: string[] }
|
||||||
|
cachedServerExecutedTools = new Set(data.tools)
|
||||||
|
|
||||||
|
logger.info('Fetched server-executed tools', {
|
||||||
|
count: cachedServerExecutedTools.size,
|
||||||
|
tools: Array.from(cachedServerExecutedTools),
|
||||||
|
})
|
||||||
|
|
||||||
|
return cachedServerExecutedTools
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to fetch server-executed tools, using empty set', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
// Return empty set on error - tools will execute client-side as fallback
|
||||||
|
return new Set()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a tool is executed server-side.
|
||||||
|
* Uses cached list or fetches if not available.
|
||||||
|
*/
|
||||||
|
export async function isServerExecutedTool(toolName: string): Promise<boolean> {
|
||||||
|
const serverTools = await fetchServerExecutedTools()
|
||||||
|
return serverTools.has(toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synchronous check if a tool is server-executed.
|
||||||
|
* Returns false if cache is not yet populated.
|
||||||
|
*/
|
||||||
|
export function isServerExecutedToolSync(toolName: string): boolean {
|
||||||
|
if (!cachedServerExecutedTools) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return cachedServerExecutedTools.has(toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the cached list of server-executed tools.
|
||||||
|
* Returns null if not yet fetched.
|
||||||
|
*/
|
||||||
|
export function getServerExecutedToolsSync(): Set<string> | null {
|
||||||
|
return cachedServerExecutedTools
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark a tool call as being handled by the server.
|
||||||
|
* Used to prevent client from executing it.
|
||||||
|
*/
|
||||||
|
export function markToolCallServerHandled(toolCallId: string, toolName: string): void {
|
||||||
|
serverHandledToolCalls.set(toolCallId, {
|
||||||
|
toolName,
|
||||||
|
startedAt: Date.now(),
|
||||||
|
})
|
||||||
|
|
||||||
|
logger.debug('Marked tool call as server-handled', { toolCallId, toolName })
|
||||||
|
|
||||||
|
// Cleanup old entries (older than 1 hour)
|
||||||
|
const oneHourAgo = Date.now() - 60 * 60 * 1000
|
||||||
|
for (const [id, info] of serverHandledToolCalls.entries()) {
|
||||||
|
if (info.startedAt < oneHourAgo) {
|
||||||
|
serverHandledToolCalls.delete(id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a specific tool call is being handled by the server.
|
||||||
|
*/
|
||||||
|
export function isToolCallServerHandled(toolCallId: string): boolean {
|
||||||
|
return serverHandledToolCalls.has(toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a tool call from server-handled tracking.
|
||||||
|
* Called when tool_result is received.
|
||||||
|
*/
|
||||||
|
export function clearToolCallServerHandled(toolCallId: string): void {
|
||||||
|
serverHandledToolCalls.delete(toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool execution state from the server (for reconnection scenarios).
|
||||||
|
*/
|
||||||
|
export async function getToolExecutionState(toolCallId: string): Promise<{
|
||||||
|
status: 'pending' | 'executing' | 'completed' | 'failed' | 'unknown'
|
||||||
|
result?: unknown
|
||||||
|
error?: string
|
||||||
|
} | null> {
|
||||||
|
try {
|
||||||
|
const response = await fetch(`/api/copilot/tools/execution-state/${toolCallId}`)
|
||||||
|
if (!response.ok) {
|
||||||
|
if (response.status === 404) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
throw new Error(`HTTP ${response.status}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return await response.json()
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to get tool execution state', {
|
||||||
|
toolCallId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pre-fetch server-executed tools list.
|
||||||
|
* Call this early in the app lifecycle.
|
||||||
|
*/
|
||||||
|
export function prefetchServerExecutedTools(): void {
|
||||||
|
fetchServerExecutedTools().catch(() => {
|
||||||
|
// Errors already logged in fetchServerExecutedTools
|
||||||
|
})
|
||||||
|
}
|
||||||
58
apps/sim/lib/copilot/server-executor/index.ts
Normal file
58
apps/sim/lib/copilot/server-executor/index.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
/**
|
||||||
|
* Server-side tool executor.
|
||||||
|
*
|
||||||
|
* This module provides the ability to execute tools server-side (in Next.js API routes)
|
||||||
|
* rather than requiring the browser to execute them.
|
||||||
|
*
|
||||||
|
* Key function: executeToolOnServer()
|
||||||
|
* - Returns ToolResult if the tool was executed server-side
|
||||||
|
* - Returns null if the tool is not registered (client should handle)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { executeRegisteredTool, isServerExecutedTool } from './registry'
|
||||||
|
import type { ExecutionContext, ToolResult } from './types'
|
||||||
|
|
||||||
|
const logger = createLogger('ServerExecutor')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a tool on the server if it's registered.
|
||||||
|
*
|
||||||
|
* @param toolName - The name of the tool to execute
|
||||||
|
* @param args - The arguments to pass to the tool
|
||||||
|
* @param context - Execution context (userId, workflowId, etc.)
|
||||||
|
* @returns ToolResult if executed, null if tool not registered server-side
|
||||||
|
*/
|
||||||
|
export async function executeToolOnServer(
|
||||||
|
toolName: string,
|
||||||
|
args: unknown,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolResult | null> {
|
||||||
|
// Check if this tool should be executed server-side
|
||||||
|
if (!isServerExecutedTool(toolName)) {
|
||||||
|
logger.debug('Tool not registered for server execution, client will handle', { toolName })
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Executing tool server-side', {
|
||||||
|
toolName,
|
||||||
|
userId: context.userId,
|
||||||
|
workflowId: context.workflowId,
|
||||||
|
})
|
||||||
|
|
||||||
|
const startTime = Date.now()
|
||||||
|
const result = await executeRegisteredTool(toolName, args, context)
|
||||||
|
|
||||||
|
logger.info('Tool execution completed', {
|
||||||
|
toolName,
|
||||||
|
success: result.success,
|
||||||
|
durationMs: Date.now() - startTime,
|
||||||
|
})
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
export { isServerExecutedTool, SERVER_EXECUTED_TOOLS } from './registry'
|
||||||
|
// Re-export types and utilities
|
||||||
|
export type { ExecutionContext, ToolResult } from './types'
|
||||||
|
export { errorResult, successResult } from './types'
|
||||||
440
apps/sim/lib/copilot/server-executor/registry.ts
Normal file
440
apps/sim/lib/copilot/server-executor/registry.ts
Normal file
@@ -0,0 +1,440 @@
|
|||||||
|
/**
|
||||||
|
* Server Tool Registry
|
||||||
|
*
|
||||||
|
* Central registry for all server-executed tools. This replaces the scattered
|
||||||
|
* executor files with a single, declarative registry.
|
||||||
|
*
|
||||||
|
* Benefits:
|
||||||
|
* - Single source of truth for tool registration
|
||||||
|
* - Type-safe with Zod schemas
|
||||||
|
* - No duplicate wrapper code
|
||||||
|
* - Easy to add new tools
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import type { z } from 'zod'
|
||||||
|
import { getBlockConfigServerTool } from '../tools/server/blocks/get-block-config'
|
||||||
|
import { getBlockOptionsServerTool } from '../tools/server/blocks/get-block-options'
|
||||||
|
// Import server tool implementations
|
||||||
|
import { getBlocksAndToolsServerTool } from '../tools/server/blocks/get-blocks-and-tools'
|
||||||
|
import { getBlocksMetadataServerTool } from '../tools/server/blocks/get-blocks-metadata-tool'
|
||||||
|
import { getTriggerBlocksServerTool } from '../tools/server/blocks/get-trigger-blocks'
|
||||||
|
import { searchDocumentationServerTool } from '../tools/server/docs/search-documentation'
|
||||||
|
import { knowledgeBaseServerTool } from '../tools/server/knowledge/knowledge-base'
|
||||||
|
import { CheckoffTodoInput, checkoffTodoServerTool } from '../tools/server/other/checkoff-todo'
|
||||||
|
import { makeApiRequestServerTool } from '../tools/server/other/make-api-request'
|
||||||
|
import {
|
||||||
|
MarkTodoInProgressInput,
|
||||||
|
markTodoInProgressServerTool,
|
||||||
|
} from '../tools/server/other/mark-todo-in-progress'
|
||||||
|
import { searchOnlineServerTool } from '../tools/server/other/search-online'
|
||||||
|
import { SleepInput, sleepServerTool } from '../tools/server/other/sleep'
|
||||||
|
import { setContextServerTool } from '../tools/server/context/set-context'
|
||||||
|
import { getCredentialsServerTool } from '../tools/server/user/get-credentials'
|
||||||
|
import { setEnvironmentVariablesServerTool } from '../tools/server/user/set-environment-variables'
|
||||||
|
import {
|
||||||
|
CheckDeploymentStatusInput,
|
||||||
|
checkDeploymentStatusServerTool,
|
||||||
|
} from '../tools/server/workflow/check-deployment-status'
|
||||||
|
import {
|
||||||
|
CreateWorkspaceMcpServerInput,
|
||||||
|
createWorkspaceMcpServerServerTool,
|
||||||
|
} from '../tools/server/workflow/create-workspace-mcp-server'
|
||||||
|
import { DeployApiInput, deployApiServerTool } from '../tools/server/workflow/deploy-api'
|
||||||
|
import { DeployChatInput, deployChatServerTool } from '../tools/server/workflow/deploy-chat'
|
||||||
|
import { DeployMcpInput, deployMcpServerTool } from '../tools/server/workflow/deploy-mcp'
|
||||||
|
import { editWorkflowServerTool } from '../tools/server/workflow/edit-workflow'
|
||||||
|
import {
|
||||||
|
GetBlockOutputsInput,
|
||||||
|
getBlockOutputsServerTool,
|
||||||
|
} from '../tools/server/workflow/get-block-outputs'
|
||||||
|
import {
|
||||||
|
GetUserWorkflowInput,
|
||||||
|
getUserWorkflowServerTool,
|
||||||
|
} from '../tools/server/workflow/get-user-workflow'
|
||||||
|
import { getWorkflowConsoleServerTool } from '../tools/server/workflow/get-workflow-console'
|
||||||
|
import {
|
||||||
|
GetWorkflowFromNameInput,
|
||||||
|
getWorkflowFromNameServerTool,
|
||||||
|
} from '../tools/server/workflow/get-workflow-from-name'
|
||||||
|
import { listUserWorkflowsServerTool } from '../tools/server/workflow/list-user-workflows'
|
||||||
|
import {
|
||||||
|
ListWorkspaceMcpServersInput,
|
||||||
|
listWorkspaceMcpServersServerTool,
|
||||||
|
} from '../tools/server/workflow/list-workspace-mcp-servers'
|
||||||
|
import { RedeployInput, redeployServerTool } from '../tools/server/workflow/redeploy'
|
||||||
|
import { RunWorkflowInput, runWorkflowServerTool } from '../tools/server/workflow/run-workflow'
|
||||||
|
import {
|
||||||
|
SetGlobalWorkflowVariablesInput,
|
||||||
|
setGlobalWorkflowVariablesServerTool,
|
||||||
|
} from '../tools/server/workflow/set-global-workflow-variables'
|
||||||
|
import {
|
||||||
|
GetBlockUpstreamReferencesInput,
|
||||||
|
getBlockUpstreamReferencesServerTool,
|
||||||
|
} from '../tools/server/workflow/get-block-upstream-references'
|
||||||
|
import {
|
||||||
|
GetWorkflowDataInput,
|
||||||
|
getWorkflowDataServerTool,
|
||||||
|
} from '../tools/server/workflow/get-workflow-data'
|
||||||
|
import {
|
||||||
|
ManageCustomToolInput,
|
||||||
|
manageCustomToolServerTool,
|
||||||
|
} from '../tools/server/workflow/manage-custom-tool'
|
||||||
|
import {
|
||||||
|
ManageMcpToolInput,
|
||||||
|
manageMcpToolServerTool,
|
||||||
|
} from '../tools/server/workflow/manage-mcp-tool'
|
||||||
|
// Import schemas
|
||||||
|
import {
|
||||||
|
EditWorkflowInput,
|
||||||
|
GetBlockConfigInput,
|
||||||
|
GetBlockOptionsInput,
|
||||||
|
GetBlocksAndToolsInput,
|
||||||
|
GetBlocksMetadataInput,
|
||||||
|
GetCredentialsInput,
|
||||||
|
GetTriggerBlocksInput,
|
||||||
|
GetWorkflowConsoleInput,
|
||||||
|
KnowledgeBaseArgsSchema,
|
||||||
|
ListUserWorkflowsInput,
|
||||||
|
MakeApiRequestInput,
|
||||||
|
SearchDocumentationInput,
|
||||||
|
SearchOnlineInput,
|
||||||
|
SetContextInput,
|
||||||
|
SetEnvironmentVariablesInput,
|
||||||
|
} from '../tools/shared/schemas'
|
||||||
|
import type { ExecutionContext, ToolResult } from './types'
|
||||||
|
import { errorResult, successResult } from './types'
|
||||||
|
|
||||||
|
const logger = createLogger('ToolRegistry')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context type for server tools.
|
||||||
|
* This is the full execution context passed to tools that need workflow/workspace info.
|
||||||
|
*/
|
||||||
|
type ServerToolContext =
|
||||||
|
| {
|
||||||
|
userId: string
|
||||||
|
workflowId?: string
|
||||||
|
workspaceId?: string
|
||||||
|
}
|
||||||
|
| undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to create a typed executor wrapper.
|
||||||
|
* This provides a clean boundary between our registry (unknown args)
|
||||||
|
* and the underlying typed server tools.
|
||||||
|
*
|
||||||
|
* The generic TArgs is inferred from the Zod schema, ensuring type safety
|
||||||
|
* at compile time while allowing runtime validation.
|
||||||
|
*/
|
||||||
|
function createExecutor<TArgs, TResult>(
|
||||||
|
serverTool: { execute: (args: TArgs, ctx?: ServerToolContext) => Promise<TResult> },
|
||||||
|
options: { passContext: boolean } = { passContext: true }
|
||||||
|
): (args: unknown, ctx: ServerToolContext) => Promise<unknown> {
|
||||||
|
return (args, ctx) => {
|
||||||
|
// After Zod validation, we know args matches TArgs
|
||||||
|
// This cast is safe because validation happens before execution
|
||||||
|
const typedArgs = args as TArgs
|
||||||
|
return options.passContext ? serverTool.execute(typedArgs, ctx) : serverTool.execute(typedArgs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool registration entry.
|
||||||
|
*/
|
||||||
|
interface ToolRegistration {
|
||||||
|
/** Zod schema for input validation (optional) */
|
||||||
|
inputSchema?: z.ZodType
|
||||||
|
/** Whether this tool requires authentication */
|
||||||
|
requiresAuth: boolean
|
||||||
|
/** The underlying execute function */
|
||||||
|
execute: (args: unknown, context: ServerToolContext) => Promise<unknown>
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The tool registry - maps tool names to their configurations.
|
||||||
|
*
|
||||||
|
* Each tool is registered with:
|
||||||
|
* - inputSchema: Zod schema for validation (optional)
|
||||||
|
* - requiresAuth: Whether userId is required
|
||||||
|
* - execute: The underlying server tool's execute function
|
||||||
|
*/
|
||||||
|
const TOOL_REGISTRY: Record<string, ToolRegistration> = {
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Block Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
get_blocks_and_tools: {
|
||||||
|
inputSchema: GetBlocksAndToolsInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlocksAndToolsServerTool),
|
||||||
|
},
|
||||||
|
get_block_config: {
|
||||||
|
inputSchema: GetBlockConfigInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlockConfigServerTool),
|
||||||
|
},
|
||||||
|
get_block_options: {
|
||||||
|
inputSchema: GetBlockOptionsInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlockOptionsServerTool),
|
||||||
|
},
|
||||||
|
get_blocks_metadata: {
|
||||||
|
inputSchema: GetBlocksMetadataInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlocksMetadataServerTool),
|
||||||
|
},
|
||||||
|
get_trigger_blocks: {
|
||||||
|
inputSchema: GetTriggerBlocksInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getTriggerBlocksServerTool),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Workflow Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
edit_workflow: {
|
||||||
|
inputSchema: EditWorkflowInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(editWorkflowServerTool),
|
||||||
|
},
|
||||||
|
get_workflow_console: {
|
||||||
|
inputSchema: GetWorkflowConsoleInput,
|
||||||
|
requiresAuth: false, // Tool validates workflowId itself
|
||||||
|
execute: createExecutor(getWorkflowConsoleServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
list_user_workflows: {
|
||||||
|
inputSchema: ListUserWorkflowsInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(listUserWorkflowsServerTool),
|
||||||
|
},
|
||||||
|
get_workflow_from_name: {
|
||||||
|
inputSchema: GetWorkflowFromNameInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getWorkflowFromNameServerTool),
|
||||||
|
},
|
||||||
|
check_deployment_status: {
|
||||||
|
inputSchema: CheckDeploymentStatusInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(checkDeploymentStatusServerTool),
|
||||||
|
},
|
||||||
|
list_workspace_mcp_servers: {
|
||||||
|
inputSchema: ListWorkspaceMcpServersInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(listWorkspaceMcpServersServerTool),
|
||||||
|
},
|
||||||
|
set_global_workflow_variables: {
|
||||||
|
inputSchema: SetGlobalWorkflowVariablesInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(setGlobalWorkflowVariablesServerTool),
|
||||||
|
},
|
||||||
|
redeploy: {
|
||||||
|
inputSchema: RedeployInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(redeployServerTool),
|
||||||
|
},
|
||||||
|
create_workspace_mcp_server: {
|
||||||
|
inputSchema: CreateWorkspaceMcpServerInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(createWorkspaceMcpServerServerTool),
|
||||||
|
},
|
||||||
|
deploy_api: {
|
||||||
|
inputSchema: DeployApiInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(deployApiServerTool),
|
||||||
|
},
|
||||||
|
deploy_chat: {
|
||||||
|
inputSchema: DeployChatInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(deployChatServerTool),
|
||||||
|
},
|
||||||
|
deploy_mcp: {
|
||||||
|
inputSchema: DeployMcpInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(deployMcpServerTool),
|
||||||
|
},
|
||||||
|
run_workflow: {
|
||||||
|
inputSchema: RunWorkflowInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(runWorkflowServerTool),
|
||||||
|
},
|
||||||
|
get_user_workflow: {
|
||||||
|
inputSchema: GetUserWorkflowInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getUserWorkflowServerTool),
|
||||||
|
},
|
||||||
|
get_block_outputs: {
|
||||||
|
inputSchema: GetBlockOutputsInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlockOutputsServerTool),
|
||||||
|
},
|
||||||
|
get_block_upstream_references: {
|
||||||
|
inputSchema: GetBlockUpstreamReferencesInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getBlockUpstreamReferencesServerTool),
|
||||||
|
},
|
||||||
|
get_workflow_data: {
|
||||||
|
inputSchema: GetWorkflowDataInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getWorkflowDataServerTool),
|
||||||
|
},
|
||||||
|
manage_custom_tool: {
|
||||||
|
inputSchema: ManageCustomToolInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(manageCustomToolServerTool),
|
||||||
|
},
|
||||||
|
manage_mcp_tool: {
|
||||||
|
inputSchema: ManageMcpToolInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(manageMcpToolServerTool),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Search Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
search_documentation: {
|
||||||
|
inputSchema: SearchDocumentationInput,
|
||||||
|
requiresAuth: false,
|
||||||
|
execute: createExecutor(searchDocumentationServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
search_online: {
|
||||||
|
inputSchema: SearchOnlineInput,
|
||||||
|
requiresAuth: false,
|
||||||
|
execute: createExecutor(searchOnlineServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
make_api_request: {
|
||||||
|
inputSchema: MakeApiRequestInput,
|
||||||
|
requiresAuth: false,
|
||||||
|
execute: createExecutor(makeApiRequestServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Knowledge Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
knowledge_base: {
|
||||||
|
inputSchema: KnowledgeBaseArgsSchema,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(knowledgeBaseServerTool),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// User Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
get_credentials: {
|
||||||
|
inputSchema: GetCredentialsInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(getCredentialsServerTool),
|
||||||
|
},
|
||||||
|
set_environment_variables: {
|
||||||
|
inputSchema: SetEnvironmentVariablesInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(setEnvironmentVariablesServerTool),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Context Tools (for headless mode)
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
set_context: {
|
||||||
|
inputSchema: SetContextInput,
|
||||||
|
requiresAuth: true,
|
||||||
|
execute: createExecutor(setContextServerTool),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Todo Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
checkoff_todo: {
|
||||||
|
inputSchema: CheckoffTodoInput,
|
||||||
|
requiresAuth: false, // Just returns success, no auth needed
|
||||||
|
execute: createExecutor(checkoffTodoServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
mark_todo_in_progress: {
|
||||||
|
inputSchema: MarkTodoInProgressInput,
|
||||||
|
requiresAuth: false,
|
||||||
|
execute: createExecutor(markTodoInProgressServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
// Utility Tools
|
||||||
|
// ─────────────────────────────────────────────────────────────────────────
|
||||||
|
sleep: {
|
||||||
|
inputSchema: SleepInput,
|
||||||
|
requiresAuth: false,
|
||||||
|
execute: createExecutor(sleepServerTool, { passContext: false }),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List of all server-executed tool names.
|
||||||
|
* Export this so clients know which tools NOT to execute locally.
|
||||||
|
*/
|
||||||
|
export const SERVER_EXECUTED_TOOLS = Object.keys(TOOL_REGISTRY)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a tool is registered for server execution.
|
||||||
|
*/
|
||||||
|
export function isServerExecutedTool(toolName: string): boolean {
|
||||||
|
return toolName in TOOL_REGISTRY
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a tool with proper validation and error handling.
|
||||||
|
*
|
||||||
|
* This is the main entry point for tool execution. It:
|
||||||
|
* 1. Looks up the tool in the registry
|
||||||
|
* 2. Validates input against the schema (if provided)
|
||||||
|
* 3. Checks authentication requirements
|
||||||
|
* 4. Executes the tool
|
||||||
|
* 5. Returns a standardized ToolResult
|
||||||
|
*/
|
||||||
|
export async function executeRegisteredTool(
|
||||||
|
toolName: string,
|
||||||
|
args: unknown,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolResult> {
|
||||||
|
const registration = TOOL_REGISTRY[toolName]
|
||||||
|
|
||||||
|
if (!registration) {
|
||||||
|
logger.warn('Unknown tool requested', { toolName })
|
||||||
|
return errorResult('UNKNOWN_TOOL', `Tool '${toolName}' is not registered for server execution`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check authentication requirement
|
||||||
|
if (registration.requiresAuth && !context.userId) {
|
||||||
|
logger.error('Authentication required but not provided', { toolName })
|
||||||
|
return errorResult('AUTH_REQUIRED', `Tool '${toolName}' requires authentication`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate input if schema is provided
|
||||||
|
let validatedArgs: unknown = args ?? {}
|
||||||
|
if (registration.inputSchema) {
|
||||||
|
const parseResult = registration.inputSchema.safeParse(args ?? {})
|
||||||
|
if (!parseResult.success) {
|
||||||
|
logger.warn('Input validation failed', {
|
||||||
|
toolName,
|
||||||
|
errors: parseResult.error.flatten(),
|
||||||
|
})
|
||||||
|
return errorResult('VALIDATION_ERROR', 'Invalid input arguments', {
|
||||||
|
errors: parseResult.error.flatten().fieldErrors,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
validatedArgs = parseResult.data
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute the tool
|
||||||
|
try {
|
||||||
|
// Pass the full execution context so tools can access workflowId/workspaceId
|
||||||
|
const toolContext = context.userId
|
||||||
|
? {
|
||||||
|
userId: context.userId,
|
||||||
|
workflowId: context.workflowId,
|
||||||
|
workspaceId: context.workspaceId,
|
||||||
|
}
|
||||||
|
: undefined
|
||||||
|
const result = await registration.execute(validatedArgs, toolContext)
|
||||||
|
return successResult(result)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
logger.error('Tool execution failed', { toolName, error: message })
|
||||||
|
return errorResult('EXECUTION_ERROR', message)
|
||||||
|
}
|
||||||
|
}
|
||||||
432
apps/sim/lib/copilot/server-executor/stream-handler.ts
Normal file
432
apps/sim/lib/copilot/server-executor/stream-handler.ts
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
/**
|
||||||
|
* SSE Stream Tool Execution Handler
|
||||||
|
*
|
||||||
|
* This module intercepts tool_call events from the Go copilot SSE stream
|
||||||
|
* and executes server-side tools, calling mark-complete to return results.
|
||||||
|
*
|
||||||
|
* Key features:
|
||||||
|
* - Non-blocking: Tool execution happens in parallel with stream forwarding
|
||||||
|
* - Resilient: Uses Redis for state persistence across disconnects
|
||||||
|
* - Transparent: Still forwards all events to browser for UI updates
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
import { executeToolOnServer, isServerExecutedTool } from './index'
|
||||||
|
import type { ExecutionContext } from './types'
|
||||||
|
|
||||||
|
const logger = createLogger('StreamToolHandler')
|
||||||
|
|
||||||
|
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||||
|
|
||||||
|
/** Redis key prefix for tool execution state */
|
||||||
|
const REDIS_KEY_PREFIX = 'copilot:tool_exec:'
|
||||||
|
|
||||||
|
/** TTL for Redis entries (1 hour) */
|
||||||
|
const REDIS_TTL_SECONDS = 60 * 60
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool execution state stored in Redis
|
||||||
|
*/
|
||||||
interface ToolExecutionState {
  toolCallId: string
  toolName: string
  status: 'pending' | 'executing' | 'completed' | 'failed'
  userId: string
  workflowId?: string
  workspaceId?: string
  chatId?: string
  startedAt: number
  completedAt?: number
  result?: unknown
  error?: string
}

/**
 * Tool call data from SSE event
 */
interface ToolCallEvent {
  id: string
  name: string
  arguments: Record<string, unknown>
  partial?: boolean
}

/**
 * Save tool execution state to Redis.
 */
async function saveToolState(state: ToolExecutionState): Promise<void> {
  const redis = getRedisClient()
  if (!redis) {
    logger.debug('Redis not available, skipping state save', {
      toolCallId: state.toolCallId,
    })
    return
  }

  try {
    const key = `${REDIS_KEY_PREFIX}${state.toolCallId}`
    await redis.setex(key, REDIS_TTL_SECONDS, JSON.stringify(state))
    logger.debug('Saved tool execution state to Redis', {
      toolCallId: state.toolCallId,
      status: state.status,
    })
  } catch (error) {
    logger.warn('Failed to save tool state to Redis', {
      toolCallId: state.toolCallId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

/**
 * Get tool execution state from Redis.
 */
async function getToolState(toolCallId: string): Promise<ToolExecutionState | null> {
  const redis = getRedisClient()
  if (!redis) return null

  try {
    const key = `${REDIS_KEY_PREFIX}${toolCallId}`
    const data = await redis.get(key)
    if (!data) return null
    return JSON.parse(data) as ToolExecutionState
  } catch (error) {
    logger.warn('Failed to get tool state from Redis', {
      toolCallId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

/**
 * Mark a tool as complete by calling the Go copilot endpoint.
 */
async function markToolComplete(
  toolCallId: string,
  toolName: string,
  status: number,
  message?: unknown,
  data?: unknown
): Promise<boolean> {
  try {
    const payload = {
      id: toolCallId,
      name: toolName,
      status,
      message,
      data,
    }

    logger.info('Marking tool complete from server', {
      toolCallId,
      toolName,
      status,
      hasMessage: message !== undefined,
      hasData: data !== undefined,
    })

    const response = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        ...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
      },
      body: JSON.stringify(payload),
    })

    if (!response.ok) {
      const errorText = await response.text().catch(() => '')
      logger.error('Failed to mark tool complete', {
        toolCallId,
        toolName,
        status: response.status,
        error: errorText,
      })
      return false
    }

    logger.info('Tool marked complete successfully', { toolCallId, toolName })
    return true
  } catch (error) {
    logger.error('Error marking tool complete', {
      toolCallId,
      toolName,
      error: error instanceof Error ? error.message : String(error),
    })
    return false
  }
}

/**
 * Handle a tool call event from the SSE stream.
 *
 * If the tool is server-executed:
 * 1. Execute it using the server executor
 * 2. Call mark-complete to return result to Go
 *
 * This runs asynchronously and doesn't block the stream.
 *
 * @returns true if this tool will be handled server-side, false if client should handle
 */
export async function handleToolCallEvent(
  event: ToolCallEvent,
  context: ExecutionContext
): Promise<boolean> {
  // Skip partial tool calls (streaming arguments)
  if (event.partial) {
    return false
  }

  // Check if this tool should be executed server-side
  if (!isServerExecutedTool(event.name)) {
    logger.debug('Tool not server-executed, client will handle', {
      toolCallId: event.id,
      toolName: event.name,
    })
    return false
  }

  // Check if this tool is already being executed (recovery scenario)
  const existingState = await getToolState(event.id)
  if (existingState) {
    if (existingState.status === 'executing') {
      logger.info('Tool already being executed (recovery scenario)', {
        toolCallId: event.id,
        toolName: event.name,
        startedAt: existingState.startedAt,
      })
      return true
    }
    if (existingState.status === 'completed') {
      logger.info('Tool already completed (recovery scenario)', {
        toolCallId: event.id,
        toolName: event.name,
        completedAt: existingState.completedAt,
      })
      return true
    }
  }

  logger.info('Handling tool call server-side', {
    toolCallId: event.id,
    toolName: event.name,
    userId: context.userId,
  })

  // Save initial state to Redis
  await saveToolState({
    toolCallId: event.id,
    toolName: event.name,
    status: 'pending',
    userId: context.userId,
    workflowId: context.workflowId,
    workspaceId: context.workspaceId,
    chatId: context.chatId,
    startedAt: Date.now(),
  })

  // Execute asynchronously - don't await here to avoid blocking stream
  executeToolServerSide(event, context).catch((error) => {
    logger.error('Async tool execution failed', {
      toolCallId: event.id,
      toolName: event.name,
      error: error instanceof Error ? error.message : String(error),
    })
  })

  return true
}

/**
 * Execute a tool server-side and mark it complete.
 * This is called asynchronously from handleToolCallEvent.
 */
async function executeToolServerSide(
  event: ToolCallEvent,
  context: ExecutionContext
): Promise<void> {
  const startTime = Date.now()

  // Update state to executing
  await saveToolState({
    toolCallId: event.id,
    toolName: event.name,
    status: 'executing',
    userId: context.userId,
    workflowId: context.workflowId,
    workspaceId: context.workspaceId,
    chatId: context.chatId,
    startedAt: startTime,
  })

  try {
    const result = await executeToolOnServer(event.name, event.arguments, context)

    if (!result) {
      // This shouldn't happen since we checked isServerExecutedTool
      logger.error('executeToolOnServer returned null for registered tool', {
        toolCallId: event.id,
        toolName: event.name,
      })

      await saveToolState({
        toolCallId: event.id,
        toolName: event.name,
        status: 'failed',
        userId: context.userId,
        workflowId: context.workflowId,
        workspaceId: context.workspaceId,
        chatId: context.chatId,
        startedAt: startTime,
        completedAt: Date.now(),
        error: 'Internal error: tool not found',
      })

      await markToolComplete(event.id, event.name, 500, 'Internal error: tool not found')
      return
    }

    const durationMs = Date.now() - startTime

    if (result.success) {
      logger.info('Tool executed successfully', {
        toolCallId: event.id,
        toolName: event.name,
        durationMs,
      })

      await saveToolState({
        toolCallId: event.id,
        toolName: event.name,
        status: 'completed',
        userId: context.userId,
        workflowId: context.workflowId,
        workspaceId: context.workspaceId,
        chatId: context.chatId,
        startedAt: startTime,
        completedAt: Date.now(),
        result: result.data,
      })

      // Mark complete with success
      await markToolComplete(
        event.id,
        event.name,
        200,
        undefined, // message
        result.data // data
      )
    } else {
      logger.warn('Tool execution failed', {
        toolCallId: event.id,
        toolName: event.name,
        durationMs,
        error: result.error,
      })

      await saveToolState({
        toolCallId: event.id,
        toolName: event.name,
        status: 'failed',
        userId: context.userId,
        workflowId: context.workflowId,
        workspaceId: context.workspaceId,
        chatId: context.chatId,
        startedAt: startTime,
        completedAt: Date.now(),
        error: result.error?.message,
      })

      // Mark complete with error
      await markToolComplete(
        event.id,
        event.name,
        400,
        result.error?.message ?? 'Tool execution failed',
        result.error?.details
      )
    }
  } catch (error) {
    const durationMs = Date.now() - startTime
    const message = error instanceof Error ? error.message : String(error)

    logger.error('Tool execution threw exception', {
      toolCallId: event.id,
      toolName: event.name,
      durationMs,
      error: message,
    })

    await saveToolState({
      toolCallId: event.id,
      toolName: event.name,
      status: 'failed',
      userId: context.userId,
      workflowId: context.workflowId,
      workspaceId: context.workspaceId,
      chatId: context.chatId,
      startedAt: startTime,
      completedAt: Date.now(),
      error: message,
    })

    // Mark complete with error
    await markToolComplete(event.id, event.name, 500, message)
  }
}

/**
 * In-memory fallback for tracking server-handled tools when Redis is unavailable.
 */
const serverHandledTools = new Map<string, { toolName: string; handledAt: number }>()

/**
 * Register a tool as being handled server-side.
 */
export function registerServerHandledTool(toolCallId: string, toolName: string): void {
  serverHandledTools.set(toolCallId, {
    toolName,
    handledAt: Date.now(),
  })

  // Clean up old entries (older than 1 hour)
  const oneHourAgo = Date.now() - 60 * 60 * 1000
  for (const [id, info] of serverHandledTools.entries()) {
    if (info.handledAt < oneHourAgo) {
      serverHandledTools.delete(id)
    }
  }
}

/**
 * Check if a tool was handled server-side.
 */
export async function wasToolHandledServerSide(toolCallId: string): Promise<boolean> {
  // Check in-memory first
  if (serverHandledTools.has(toolCallId)) {
    return true
  }

  // Check Redis
  const state = await getToolState(toolCallId)
  return state !== null
}

/**
 * Get the execution state of a tool.
 * Useful for client reconnection scenarios.
 */
export async function getToolExecutionState(
  toolCallId: string
): Promise<ToolExecutionState | null> {
  return getToolState(toolCallId)
}

/**
 * Get list of server-executed tool names for client reference.
 */
export { SERVER_EXECUTED_TOOLS } from './registry'
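For orientation, here is a minimal sketch of how a caller might wire the exports above into an SSE proxy loop. The event envelope, the parsing, and the import paths are assumptions made for illustration; only handleToolCallEvent, registerServerHandledTool, getToolExecutionState, and ExecutionContext come from this change.

// Hypothetical wiring (not part of this diff): decide per tool_call event whether
// the server executor owns the tool, and record that decision for later events.
import {
  getToolExecutionState,
  handleToolCallEvent,
  registerServerHandledTool,
} from '@/lib/copilot/server-executor/stream-handler'
import type { ExecutionContext } from '@/lib/copilot/server-executor/types'

async function onToolCallEvent(raw: string, context: ExecutionContext): Promise<void> {
  const data = JSON.parse(raw) as {
    id: string
    name: string
    arguments?: Record<string, unknown>
    partial?: boolean
  }

  // Returns true when the tool will be executed and marked complete server-side.
  const handledOnServer = await handleToolCallEvent(
    { id: data.id, name: data.name, arguments: data.arguments ?? {}, partial: data.partial },
    context
  )

  if (handledOnServer) {
    // In-memory fallback so later processing can tell the server claimed this tool.
    registerServerHandledTool(data.id, data.name)
  }
}

// On reconnection, a client-facing route could surface the persisted Redis state.
async function lookupToolState(toolCallId: string) {
  const state = await getToolExecutionState(toolCallId)
  return state?.status ?? null
}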
92  apps/sim/lib/copilot/server-executor/types.ts  Normal file
@@ -0,0 +1,92 @@
/**
 * Type definitions for the server executor.
 *
 * This provides a clean, type-safe interface for tool execution
 * without any 'any' types.
 */

import type { z } from 'zod'

/**
 * Standard result type for all tool executions.
 * This is the contract between server executors and the chat route.
 */
export interface ToolResult<T = unknown> {
  success: boolean
  data?: T
  error?: {
    code: string
    message: string
    details?: Record<string, unknown>
  }
}

/**
 * Context passed to tool executors.
 *
 * This context is passed from Go copilot to SIM on each tool_call event.
 * In client mode, workflowId/workspaceId come from the initial request.
 * In headless mode, they can be set dynamically via the set_context tool.
 */
export interface ExecutionContext {
  userId: string
  workflowId?: string
  workspaceId?: string
  chatId?: string
}

/**
 * Configuration for a registered tool.
 * This defines how a tool should be validated and executed.
 */
export interface ToolConfig<
  TInputSchema extends z.ZodType = z.ZodType,
  TOutputSchema extends z.ZodType = z.ZodType,
> {
  /** The canonical name of the tool */
  name: string

  /** Zod schema for validating input args (optional - if not provided, args pass through) */
  inputSchema?: TInputSchema

  /** Zod schema for validating output (optional - if not provided, output passes through) */
  outputSchema?: TOutputSchema

  /** Whether context (userId) is required for this tool */
  requiresAuth?: boolean

  /**
   * The execute function.
   * Takes validated args and context, returns result data.
   */
  execute: (
    args: TInputSchema extends z.ZodType ? z.infer<TInputSchema> : unknown,
    context: ExecutionContext
  ) => Promise<TOutputSchema extends z.ZodType ? z.infer<TOutputSchema> : unknown>
}

/**
 * Type for a tool executor function (after wrapping).
 */
export type ToolExecutor = (args: unknown, context: ExecutionContext) => Promise<ToolResult>

/**
 * Helper to create a success result.
 */
export function successResult<T>(data: T): ToolResult<T> {
  return { success: true, data }
}

/**
 * Helper to create an error result.
 */
export function errorResult(
  code: string,
  message: string,
  details?: Record<string, unknown>
): ToolResult {
  return {
    success: false,
    error: { code, message, details },
  }
}
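To show how these types fit together, below is a small, hypothetical tool definition. The tool name, the schemas, and the echo behaviour are invented for the example; real tools are registered in the server executor's registry, and the wrapper is only a sketch of the ToolExecutor shape.

import { z } from 'zod'
import {
  type ExecutionContext,
  type ToolConfig,
  type ToolExecutor,
  errorResult,
  successResult,
} from '@/lib/copilot/server-executor/types'

const EchoInput = z.object({ text: z.string().min(1) })
const EchoOutput = z.object({ text: z.string(), userId: z.string() })

// A toy tool config: validate input, require auth, return validated output.
const echoTool: ToolConfig<typeof EchoInput, typeof EchoOutput> = {
  name: 'echo_text',
  inputSchema: EchoInput,
  outputSchema: EchoOutput,
  requiresAuth: true,
  execute: async (args, context) => ({ text: args.text, userId: context.userId }),
}

// A hand-rolled executor in the ToolExecutor shape: parse, run, wrap the result.
const runEcho: ToolExecutor = async (args, context: ExecutionContext) => {
  const parsed = EchoInput.safeParse(args)
  if (!parsed.success) {
    return errorResult('INVALID_ARGUMENTS', parsed.error.message)
  }
  const data = await echoTool.execute(parsed.data, context)
  return successResult(data)
}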
@@ -1,23 +1,11 @@
-import { createLogger } from '@sim/logger'
 import { FileCode, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  GetBlockConfigInput,
-  GetBlockConfigResult,
-} from '@/lib/copilot/tools/shared/schemas'
 import { getLatestBlock } from '@/blocks/registry'

-interface GetBlockConfigArgs {
-  blockType: string
-  operation?: string
-  trigger?: boolean
-}
-
 export class GetBlockConfigClientTool extends BaseClientTool {
   static readonly id = 'get_block_config'

@@ -63,38 +51,6 @@ export class GetBlockConfigClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: GetBlockConfigArgs): Promise<void> {
-    const logger = createLogger('GetBlockConfigClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const { blockType, operation, trigger } = GetBlockConfigInput.parse(args || {})
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({
-          toolName: 'get_block_config',
-          payload: { blockType, operation, trigger },
-        }),
-      })
-      if (!res.ok) {
-        const errorText = await res.text().catch(() => '')
-        throw new Error(errorText || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      const result = GetBlockConfigResult.parse(parsed.result)
-
-      const inputCount = Object.keys(result.inputs).length
-      const outputCount = Object.keys(result.outputs).length
-      await this.markToolComplete(200, { inputs: inputCount, outputs: outputCount }, result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Execute failed', { message })
-      await this.markToolComplete(500, message)
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
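Each client tool below previously re-implemented the same boilerplate that these hunks delete: POST the tool name and payload to /api/copilot/execute-copilot-server-tool, surface errors, then report the result via markToolComplete. Condensed here into one hypothetical helper for reference (the function name is invented; the endpoint and body shape come from the removed code, and the mark-complete step now happens server-side):

// Sketch of the removed per-tool boilerplate, collapsed into a single helper.
async function executeViaServerToolRoute(toolName: string, payload: unknown): Promise<unknown> {
  const res = await fetch('/api/copilot/execute-copilot-server-tool', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolName, payload }),
  })
  if (!res.ok) {
    const errorText = await res.text().catch(() => '')
    throw new Error(errorText || `Server error (${res.status})`)
  }
  // Callers used to validate this JSON with ExecuteResponseSuccessSchema before
  // marking the tool complete; the server executor now owns both steps.
  return res.json()
}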
@@ -1,21 +1,11 @@
-import { createLogger } from '@sim/logger'
 import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  GetBlockOptionsInput,
-  GetBlockOptionsResult,
-} from '@/lib/copilot/tools/shared/schemas'
 import { getLatestBlock } from '@/blocks/registry'

-interface GetBlockOptionsArgs {
-  blockId: string
-}
-
 export class GetBlockOptionsClientTool extends BaseClientTool {
   static readonly id = 'get_block_options'

@@ -65,46 +55,6 @@ export class GetBlockOptionsClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: GetBlockOptionsArgs): Promise<void> {
-    const logger = createLogger('GetBlockOptionsClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      // Handle both camelCase and snake_case parameter names, plus blockType as an alias
-      const normalizedArgs = args
-        ? {
-            blockId:
-              args.blockId ||
-              (args as any).block_id ||
-              (args as any).blockType ||
-              (args as any).block_type,
-          }
-        : {}
-
-      logger.info('execute called', { originalArgs: args, normalizedArgs })
-
-      const { blockId } = GetBlockOptionsInput.parse(normalizedArgs)
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_block_options', payload: { blockId } }),
-      })
-      if (!res.ok) {
-        const errorText = await res.text().catch(() => '')
-        throw new Error(errorText || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      const result = GetBlockOptionsResult.parse(parsed.result)
-
-      await this.markToolComplete(200, { operations: result.operations.length }, result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Execute failed', { message })
-      await this.markToolComplete(500, message)
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,14 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { Blocks, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  GetBlocksAndToolsResult,
-} from '@/lib/copilot/tools/shared/schemas'

 export class GetBlocksAndToolsClientTool extends BaseClientTool {
   static readonly id = 'get_blocks_and_tools'
@@ -30,30 +25,6 @@ export class GetBlocksAndToolsClientTool extends BaseClientTool {
     interrupt: undefined,
   }

-  async execute(): Promise<void> {
-    const logger = createLogger('GetBlocksAndToolsClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_blocks_and_tools', payload: {} }),
-      })
-      if (!res.ok) {
-        const errorText = await res.text().catch(() => '')
-        throw new Error(errorText || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      const result = GetBlocksAndToolsResult.parse(parsed.result)
-
-      await this.markToolComplete(200, 'Successfully retrieved blocks and tools', result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      await this.markToolComplete(500, message)
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,19 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  GetBlocksMetadataInput,
-  GetBlocksMetadataResult,
-} from '@/lib/copilot/tools/shared/schemas'

-interface GetBlocksMetadataArgs {
-  blockIds: string[]
-}
-
 export class GetBlocksMetadataClientTool extends BaseClientTool {
   static readonly id = 'get_blocks_metadata'
@@ -63,33 +53,6 @@ export class GetBlocksMetadataClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: GetBlocksMetadataArgs): Promise<void> {
-    const logger = createLogger('GetBlocksMetadataClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const { blockIds } = GetBlocksMetadataInput.parse(args || {})
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_blocks_metadata', payload: { blockIds } }),
-      })
-      if (!res.ok) {
-        const errorText = await res.text().catch(() => '')
-        throw new Error(errorText || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      const result = GetBlocksMetadataResult.parse(parsed.result)
-
-      await this.markToolComplete(200, { retrieved: Object.keys(result.metadata).length }, result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Execute failed', { message })
-      await this.markToolComplete(500, message)
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,14 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  GetTriggerBlocksResult,
-} from '@/lib/copilot/tools/shared/schemas'

 export class GetTriggerBlocksClientTool extends BaseClientTool {
   static readonly id = 'get_trigger_blocks'
@@ -30,35 +25,6 @@ export class GetTriggerBlocksClientTool extends BaseClientTool {
     interrupt: undefined,
   }

-  async execute(): Promise<void> {
-    const logger = createLogger('GetTriggerBlocksClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_trigger_blocks', payload: {} }),
-      })
-      if (!res.ok) {
-        const errorText = await res.text().catch(() => '')
-        try {
-          const errorJson = JSON.parse(errorText)
-          throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
-        } catch {
-          throw new Error(errorText || `Server error (${res.status})`)
-        }
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      const result = GetTriggerBlocksResult.parse(parsed.result)
-
-      await this.markToolComplete(200, 'Successfully retrieved trigger blocks', result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      await this.markToolComplete(500, message)
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,14 +1,10 @@
-import { createLogger } from '@sim/logger'
 import { Database, Loader2, MinusCircle, PlusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  ExecuteResponseSuccessSchema,
-  type KnowledgeBaseArgs,
-} from '@/lib/copilot/tools/shared/schemas'
+import type { KnowledgeBaseArgs } from '@/lib/copilot/tools/shared/schemas'
 import { useCopilotStore } from '@/stores/panel/copilot/store'

 /**
@@ -89,42 +85,6 @@ export class KnowledgeBaseClientTool extends BaseClientTool {
     },
   }

-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: KnowledgeBaseArgs): Promise<void> {
-    await this.execute(args)
-  }
-
-  async execute(args?: KnowledgeBaseArgs): Promise<void> {
-    const logger = createLogger('KnowledgeBaseClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      const payload: KnowledgeBaseArgs = { ...(args || { operation: 'list' }) }
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'knowledge_base', payload }),
-      })
-
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Knowledge base operation completed', parsed.result)
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to access knowledge base')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Check, Loader2, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,11 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'

-interface CheckoffTodoArgs {
-  id?: string
-  todoId?: string
-}
-
 export class CheckoffTodoClientTool extends BaseClientTool {
   static readonly id = 'checkoff_todo'

@@ -27,35 +21,6 @@ export class CheckoffTodoClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: CheckoffTodoArgs): Promise<void> {
-    const logger = createLogger('CheckoffTodoClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const todoId = args?.id || args?.todoId
-      if (!todoId) {
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(400, 'Missing todo id')
-        return
-      }
-
-      try {
-        const { useCopilotStore } = await import('@/stores/panel/copilot/store')
-        const store = useCopilotStore.getState()
-        if (store.updatePlanTodoStatus) {
-          store.updatePlanTodoStatus(todoId, 'completed')
-        }
-      } catch (e) {
-        logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Todo checked off', { todoId })
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to check off todo')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Globe2, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,15 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

-interface MakeApiRequestArgs {
-  url: string
-  method: 'GET' | 'POST' | 'PUT'
-  queryParams?: Record<string, string | number | boolean>
-  headers?: Record<string, string>
-  body?: any
-}
-
 export class MakeApiRequestClientTool extends BaseClientTool {
   static readonly id = 'make_api_request'
@@ -88,39 +78,8 @@ export class MakeApiRequestClientTool extends BaseClientTool {
     },
   }

-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: MakeApiRequestArgs): Promise<void> {
-    const logger = createLogger('MakeApiRequestClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'make_api_request', payload: args || {} }),
-      })
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'API request executed', parsed.result)
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'API request failed')
-    }
-  }
-
-  async execute(args?: MakeApiRequestArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }

 // Register UI config at module load
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,11 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'

-interface MarkTodoInProgressArgs {
-  id?: string
-  todoId?: string
-}
-
 export class MarkTodoInProgressClientTool extends BaseClientTool {
   static readonly id = 'mark_todo_in_progress'

@@ -30,35 +24,6 @@ export class MarkTodoInProgressClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: MarkTodoInProgressArgs): Promise<void> {
-    const logger = createLogger('MarkTodoInProgressClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const todoId = args?.id || args?.todoId
-      if (!todoId) {
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(400, 'Missing todo id')
-        return
-      }
-
-      try {
-        const { useCopilotStore } = await import('@/stores/panel/copilot/store')
-        const store = useCopilotStore.getState()
-        if (store.updatePlanTodoStatus) {
-          store.updatePlanTodoStatus(todoId, 'executing')
-        }
-      } catch (e) {
-        logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Todo marked in progress', { todoId })
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to mark todo in progress')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,17 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { BookOpen, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

-interface SearchDocumentationArgs {
-  query: string
-  topK?: number
-  threshold?: number
-}
-
 export class SearchDocumentationClientTool extends BaseClientTool {
   static readonly id = 'search_documentation'
@@ -53,28 +45,6 @@ export class SearchDocumentationClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: SearchDocumentationArgs): Promise<void> {
-    const logger = createLogger('SearchDocumentationClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'search_documentation', payload: args || {} }),
-      })
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Documentation search complete', parsed.result)
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Documentation search failed')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -46,7 +46,6 @@ export class SearchOnlineClientTool extends BaseClientTool {
     },
   }

-  async execute(): Promise<void> {
-    return
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, MinusCircle, Moon, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -7,16 +6,6 @@ import {
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'

-/** Maximum sleep duration in seconds (3 minutes) */
-const MAX_SLEEP_SECONDS = 180
-
-/** Track sleep start times for calculating elapsed time on wake */
-const sleepStartTimes: Record<string, number> = {}
-
-interface SleepArgs {
-  seconds?: number
-}
-
 /**
  * Format seconds into a human-readable duration string
  */
@@ -87,70 +76,8 @@ export class SleepClientTool extends BaseClientTool {
     },
   }

-  /**
-   * Get elapsed seconds since sleep started
-   */
-  getElapsedSeconds(): number {
-    const startTime = sleepStartTimes[this.toolCallId]
-    if (!startTime) return 0
-    return (Date.now() - startTime) / 1000
-  }
-
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: SleepArgs): Promise<void> {
-    const logger = createLogger('SleepClientTool')
-
-    // Use a timeout slightly longer than max sleep (3 minutes + buffer)
-    const timeoutMs = (MAX_SLEEP_SECONDS + 30) * 1000
-
-    await this.executeWithTimeout(async () => {
-      const params = args || {}
-      logger.debug('handleAccept() called', {
-        toolCallId: this.toolCallId,
-        state: this.getState(),
-        hasArgs: !!args,
-        seconds: params.seconds,
-      })
-
-      // Validate and clamp seconds
-      let seconds = typeof params.seconds === 'number' ? params.seconds : 0
-      if (seconds < 0) seconds = 0
-      if (seconds > MAX_SLEEP_SECONDS) seconds = MAX_SLEEP_SECONDS
-
-      logger.debug('Starting sleep', { seconds })
-
-      // Track start time for elapsed calculation
-      sleepStartTimes[this.toolCallId] = Date.now()
-
-      this.setState(ClientToolCallState.executing)
-
-      try {
-        // Sleep for the specified duration
-        await new Promise((resolve) => setTimeout(resolve, seconds * 1000))
-
-        logger.debug('Sleep completed successfully')
-        this.setState(ClientToolCallState.success)
-        await this.markToolComplete(200, `Slept for ${seconds} seconds`)
-      } catch (error) {
-        const message = error instanceof Error ? error.message : String(error)
-        logger.error('Sleep failed', { error: message })
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(500, message)
-      } finally {
-        // Clean up start time tracking
-        delete sleepStartTimes[this.toolCallId]
-      }
-    }, timeoutMs)
-  }
-
-  async execute(args?: SleepArgs): Promise<void> {
-    // Auto-execute without confirmation - go straight to executing
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }

 // Register UI config at module load
@@ -1,17 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { Key, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

-interface GetCredentialsArgs {
-  userId?: string
-  workflowId?: string
-}
-
 export class GetCredentialsClientTool extends BaseClientTool {
   static readonly id = 'get_credentials'
@@ -41,33 +33,6 @@ export class GetCredentialsClientTool extends BaseClientTool {
     },
   }

-  async execute(args?: GetCredentialsArgs): Promise<void> {
-    const logger = createLogger('GetCredentialsClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      const payload: GetCredentialsArgs = { ...(args || {}) }
-      if (!payload.workflowId && !payload.userId) {
-        const { activeWorkflowId } = useWorkflowRegistry.getState()
-        if (activeWorkflowId) payload.workflowId = activeWorkflowId
-      }
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_credentials', payload }),
-      })
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Connected integrations fetched', parsed.result)
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to fetch connected integrations')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Settings2, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,14 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
-import { useEnvironmentStore } from '@/stores/settings/environment'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

-interface SetEnvArgs {
-  variables: Record<string, string>
-  workflowId?: string
-}
-
 export class SetEnvironmentVariablesClientTool extends BaseClientTool {
   static readonly id = 'set_environment_variables'
@@ -102,52 +93,8 @@ export class SetEnvironmentVariablesClientTool extends BaseClientTool {
     },
   }

-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: SetEnvArgs): Promise<void> {
-    const logger = createLogger('SetEnvironmentVariablesClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      const payload: SetEnvArgs = { ...(args || { variables: {} }) }
-      if (!payload.workflowId) {
-        const { activeWorkflowId } = useWorkflowRegistry.getState()
-        if (activeWorkflowId) payload.workflowId = activeWorkflowId
-      }
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'set_environment_variables', payload }),
-      })
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Environment variables updated', parsed.result)
-      this.setState(ClientToolCallState.success)
-
-      // Refresh the environment store so the UI reflects the new variables
-      try {
-        await useEnvironmentStore.getState().loadEnvironmentVariables()
-        logger.info('Environment store refreshed after setting variables')
-      } catch (error) {
-        logger.warn('Failed to refresh environment store:', error)
-      }
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to set environment variables')
-    }
-  }
-
-  async execute(args?: SetEnvArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }

 // Register UI config at module load
@@ -1,50 +1,9 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Loader2, Rocket, X, XCircle } from 'lucide-react'
|
import { Loader2, Rocket, X, XCircle } from 'lucide-react'
|
||||||
import {
|
import {
|
||||||
BaseClientTool,
|
BaseClientTool,
|
||||||
type BaseClientToolMetadata,
|
type BaseClientToolMetadata,
|
||||||
ClientToolCallState,
|
ClientToolCallState,
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
} from '@/lib/copilot/tools/client/base-tool'
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
|
|
||||||
interface CheckDeploymentStatusArgs {
|
|
||||||
workflowId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ApiDeploymentDetails {
|
|
||||||
isDeployed: boolean
|
|
||||||
deployedAt: string | null
|
|
||||||
endpoint: string | null
|
|
||||||
apiKey: string | null
|
|
||||||
needsRedeployment: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ChatDeploymentDetails {
|
|
||||||
isDeployed: boolean
|
|
||||||
chatId: string | null
|
|
||||||
identifier: string | null
|
|
||||||
chatUrl: string | null
|
|
||||||
title: string | null
|
|
||||||
description: string | null
|
|
||||||
authType: string | null
|
|
||||||
allowedEmails: string[] | null
|
|
||||||
outputConfigs: Array<{ blockId: string; path: string }> | null
|
|
||||||
welcomeMessage: string | null
|
|
||||||
primaryColor: string | null
|
|
||||||
hasPassword: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
interface McpDeploymentDetails {
|
|
||||||
isDeployed: boolean
|
|
||||||
servers: Array<{
|
|
||||||
serverId: string
|
|
||||||
serverName: string
|
|
||||||
toolName: string
|
-    toolDescription: string | null
-    parameterSchema?: Record<string, unknown> | null
-    toolId?: string | null
-  }>
-}
-
 export class CheckDeploymentStatusClientTool extends BaseClientTool {
   static readonly id = 'check_deployment_status'
@@ -75,141 +34,6 @@ export class CheckDeploymentStatusClientTool extends BaseClientTool {
     interrupt: undefined,
   }
 
-  async execute(args?: CheckDeploymentStatusArgs): Promise<void> {
-    const logger = createLogger('CheckDeploymentStatusClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      const workflowId = args?.workflowId || activeWorkflowId
-
-      if (!workflowId) {
-        throw new Error('No workflow ID provided')
-      }
-
-      const workflow = workflows[workflowId]
-      const workspaceId = workflow?.workspaceId
-
-      // Fetch deployment status from all sources
-      const [apiDeployRes, chatDeployRes, mcpServersRes] = await Promise.all([
-        fetch(`/api/workflows/${workflowId}/deploy`),
-        fetch(`/api/workflows/${workflowId}/chat/status`),
-        workspaceId ? fetch(`/api/mcp/workflow-servers?workspaceId=${workspaceId}`) : null,
-      ])
-
-      const apiDeploy = apiDeployRes.ok ? await apiDeployRes.json() : null
-      const chatDeploy = chatDeployRes.ok ? await chatDeployRes.json() : null
-      const mcpServers = mcpServersRes?.ok ? await mcpServersRes.json() : null
-
-      // API deployment details
-      const isApiDeployed = apiDeploy?.isDeployed || false
-      const appUrl = typeof window !== 'undefined' ? window.location.origin : ''
-      const apiDetails: ApiDeploymentDetails = {
-        isDeployed: isApiDeployed,
-        deployedAt: apiDeploy?.deployedAt || null,
-        endpoint: isApiDeployed ? `${appUrl}/api/workflows/${workflowId}/execute` : null,
-        apiKey: apiDeploy?.apiKey || null,
-        needsRedeployment: apiDeploy?.needsRedeployment === true,
-      }
-
-      // Chat deployment details
-      const isChatDeployed = !!(chatDeploy?.isDeployed && chatDeploy?.deployment)
-      const chatDetails: ChatDeploymentDetails = {
-        isDeployed: isChatDeployed,
-        chatId: chatDeploy?.deployment?.id || null,
-        identifier: chatDeploy?.deployment?.identifier || null,
-        chatUrl: isChatDeployed ? `${appUrl}/chat/${chatDeploy?.deployment?.identifier}` : null,
-        title: chatDeploy?.deployment?.title || null,
-        description: chatDeploy?.deployment?.description || null,
-        authType: chatDeploy?.deployment?.authType || null,
-        allowedEmails: Array.isArray(chatDeploy?.deployment?.allowedEmails)
-          ? chatDeploy?.deployment?.allowedEmails
-          : null,
-        outputConfigs: Array.isArray(chatDeploy?.deployment?.outputConfigs)
-          ? chatDeploy?.deployment?.outputConfigs
-          : null,
-        welcomeMessage: chatDeploy?.deployment?.customizations?.welcomeMessage || null,
-        primaryColor: chatDeploy?.deployment?.customizations?.primaryColor || null,
-        hasPassword: chatDeploy?.deployment?.hasPassword === true,
-      }
-
-      // MCP deployment details - find servers that have this workflow as a tool
-      const mcpServerList = mcpServers?.data?.servers || []
-      const mcpToolDeployments: McpDeploymentDetails['servers'] = []
-
-      for (const server of mcpServerList) {
-        // Check if this workflow is deployed as a tool on this server
-        if (server.toolNames && Array.isArray(server.toolNames)) {
-          // We need to fetch the actual tools to check if this workflow is there
-          try {
-            const toolsRes = await fetch(
-              `/api/mcp/workflow-servers/${server.id}/tools?workspaceId=${workspaceId}`
-            )
-            if (toolsRes.ok) {
-              const toolsData = await toolsRes.json()
-              const tools = toolsData.data?.tools || []
-              for (const tool of tools) {
-                if (tool.workflowId === workflowId) {
-                  mcpToolDeployments.push({
-                    serverId: server.id,
-                    serverName: server.name,
-                    toolName: tool.toolName,
-                    toolDescription: tool.toolDescription,
-                    parameterSchema: tool.parameterSchema ?? null,
-                    toolId: tool.id ?? null,
-                  })
-                }
-              }
-            }
-          } catch {
-            // Skip this server if we can't fetch tools
-          }
-        }
-      }
-
-      const isMcpDeployed = mcpToolDeployments.length > 0
-      const mcpDetails: McpDeploymentDetails = {
-        isDeployed: isMcpDeployed,
-        servers: mcpToolDeployments,
-      }
-
-      // Build deployment types list
-      const deploymentTypes: string[] = []
-      if (isApiDeployed) deploymentTypes.push('api')
-      if (isChatDeployed) deploymentTypes.push('chat')
-      if (isMcpDeployed) deploymentTypes.push('mcp')
-
-      const isDeployed = isApiDeployed || isChatDeployed || isMcpDeployed
-
-      // Build summary message
-      let message = ''
-      if (!isDeployed) {
-        message = 'Workflow is not deployed'
-      } else {
-        const parts: string[] = []
-        if (isApiDeployed) parts.push('API')
-        if (isChatDeployed) parts.push(`Chat (${chatDetails.identifier})`)
-        if (isMcpDeployed) {
-          const serverNames = mcpToolDeployments.map((d) => d.serverName).join(', ')
-          parts.push(`MCP (${serverNames})`)
-        }
-        message = `Workflow is deployed as: ${parts.join(', ')}`
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, message, {
-        isDeployed,
-        deploymentTypes,
-        api: apiDetails,
-        chat: chatDetails,
-        mcp: mcpDetails,
-      })
-
-      logger.info('Checked deployment status', { isDeployed, deploymentTypes })
-    } catch (e: any) {
-      logger.error('Check deployment status failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to check deployment status')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }

@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Plus, Server, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,7 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { useCopilotStore } from '@/stores/panel/copilot/store'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
 export interface CreateWorkspaceMcpServerArgs {
   /** Name of the MCP server */
@@ -79,77 +77,6 @@ export class CreateWorkspaceMcpServerClientTool extends BaseClientTool {
     },
   }
 
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: CreateWorkspaceMcpServerArgs): Promise<void> {
-    const logger = createLogger('CreateWorkspaceMcpServerClientTool')
-    try {
-      if (!args?.name) {
-        throw new Error('Server name is required')
-      }
-
-      // Get workspace ID from active workflow if not provided
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      let workspaceId = args?.workspaceId
-
-      if (!workspaceId && activeWorkflowId) {
-        workspaceId = workflows[activeWorkflowId]?.workspaceId
-      }
-
-      if (!workspaceId) {
-        throw new Error('No workspace ID available')
-      }
-
-      this.setState(ClientToolCallState.executing)
-
-      const res = await fetch('/api/mcp/workflow-servers', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({
-          workspaceId,
-          name: args.name.trim(),
-          description: args.description?.trim() || null,
-        }),
-      })
-
-      const data = await res.json()
-
-      if (!res.ok) {
-        throw new Error(data.error || `Failed to create MCP server (${res.status})`)
-      }
-
-      const server = data.data?.server
-      if (!server) {
-        throw new Error('Server creation response missing server data')
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(
-        200,
-        `MCP server "${args.name}" created successfully. You can now deploy workflows to it using deploy_mcp.`,
-        {
-          success: true,
-          serverId: server.id,
-          serverName: server.name,
-          description: server.description,
-        }
-      )
-
-      logger.info(`Created MCP server: ${server.name} (${server.id})`)
-    } catch (e: any) {
-      logger.error('Failed to create MCP server', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to create MCP server', {
-        success: false,
-        error: e?.message,
-      })
-    }
-  }
-
-  async execute(args?: CreateWorkspaceMcpServerArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }

@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Rocket, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,8 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { getInputFormatExample } from '@/lib/workflows/operations/deployment-utils'
 import { useCopilotStore } from '@/stores/panel/copilot/store'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
@@ -125,161 +122,8 @@ export class DeployApiClientTool extends BaseClientTool {
     },
   }
 
-  /**
-   * Checks if the user has any API keys (workspace or personal)
-   */
-  private async hasApiKeys(workspaceId: string): Promise<boolean> {
-    try {
-      const [workspaceRes, personalRes] = await Promise.all([
-        fetch(`/api/workspaces/${workspaceId}/api-keys`),
-        fetch('/api/users/me/api-keys'),
-      ])
-
-      if (!workspaceRes.ok || !personalRes.ok) {
-        return false
-      }
-
-      const workspaceData = await workspaceRes.json()
-      const personalData = await personalRes.json()
-
-      const workspaceKeys = (workspaceData?.keys || []) as Array<any>
-      const personalKeys = (personalData?.keys || []) as Array<any>
-
-      return workspaceKeys.length > 0 || personalKeys.length > 0
-    } catch (error) {
-      const logger = createLogger('DeployApiClientTool')
-      logger.warn('Failed to check API keys:', error)
-      return false
-    }
-  }
-
-  /**
-   * Opens the settings modal to the API keys tab
-   */
-  private openApiKeysModal(): void {
-    window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'apikeys' } }))
-  }
-
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: DeployApiArgs): Promise<void> {
-    const logger = createLogger('DeployApiClientTool')
-    try {
-      const action = args?.action || 'deploy'
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      const workflowId = args?.workflowId || activeWorkflowId
-
-      if (!workflowId) {
-        throw new Error('No workflow ID provided')
-      }
-
-      const workflow = workflows[workflowId]
-      const workspaceId = workflow?.workspaceId
-
-      // For deploy action, check if user has API keys first
-      if (action === 'deploy') {
-        if (!workspaceId) {
-          throw new Error('Workflow workspace not found')
-        }
-
-        const hasKeys = await this.hasApiKeys(workspaceId)
-
-        if (!hasKeys) {
-          this.setState(ClientToolCallState.rejected)
-          this.openApiKeysModal()
-
-          await this.markToolComplete(
-            200,
-            'Cannot deploy without an API key. Opened API key settings so you can create one. Once you have an API key, try deploying again.',
-            {
-              needsApiKey: true,
-              message:
-                'You need to create an API key before you can deploy your workflow. The API key settings have been opened for you. After creating an API key, you can deploy your workflow.',
-            }
-          )
-          return
-        }
-      }
-
-      this.setState(ClientToolCallState.executing)
-
-      const endpoint = `/api/workflows/${workflowId}/deploy`
-      const method = action === 'deploy' ? 'POST' : 'DELETE'
-
-      const res = await fetch(endpoint, {
-        method,
-        headers: { 'Content-Type': 'application/json' },
-        body: action === 'deploy' ? JSON.stringify({ deployChatEnabled: false }) : undefined,
-      })
-
-      if (!res.ok) {
-        const txt = await res.text().catch(() => '')
-        throw new Error(txt || `Server error (${res.status})`)
-      }
-
-      const json = await res.json()
-
-      let successMessage = ''
-      let resultData: any = {
-        action,
-        isDeployed: action === 'deploy',
-        deployedAt: json.deployedAt,
-      }
-
-      if (action === 'deploy') {
-        const appUrl = getBaseUrl()
-        const apiEndpoint = `${appUrl}/api/workflows/${workflowId}/execute`
-        const apiKeyPlaceholder = '$SIM_API_KEY'
-
-        const inputExample = getInputFormatExample(false)
-        const curlCommand = `curl -X POST -H "X-API-Key: ${apiKeyPlaceholder}" -H "Content-Type: application/json"${inputExample} ${apiEndpoint}`
-
-        successMessage = 'Workflow deployed successfully as API. You can now call it via REST.'
-
-        resultData = {
-          ...resultData,
-          endpoint: apiEndpoint,
-          curlCommand,
-          apiKeyPlaceholder,
-        }
-      } else {
-        successMessage = 'Workflow undeployed successfully.'
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, successMessage, resultData)
-
-      // Refresh the workflow registry to update deployment status
-      try {
-        const setDeploymentStatus = useWorkflowRegistry.getState().setDeploymentStatus
-        if (action === 'deploy') {
-          setDeploymentStatus(
-            workflowId,
-            true,
-            json.deployedAt ? new Date(json.deployedAt) : undefined,
-            json.apiKey || ''
-          )
-        } else {
-          setDeploymentStatus(workflowId, false, undefined, '')
-        }
-        const actionPast = action === 'undeploy' ? 'undeployed' : 'deployed'
-        logger.info(`Workflow ${actionPast} as API and registry updated`)
-      } catch (error) {
-        logger.warn('Failed to update workflow registry:', error)
-      }
-    } catch (e: any) {
-      logger.error('Deploy API failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to deploy API')
-    }
-  }
-
-  async execute(args?: DeployApiArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
 
 // Register UI config at module load

@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, MessageSquare, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -7,7 +6,6 @@ import {
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
 import { useCopilotStore } from '@/stores/panel/copilot/store'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
 export type ChatAuthType = 'public' | 'password' | 'email' | 'sso'
 
@@ -118,263 +116,8 @@ export class DeployChatClientTool extends BaseClientTool {
     },
   }
 
-  /**
-   * Generates a default identifier from the workflow name
-   */
-  private generateIdentifier(workflowName: string): string {
-    return workflowName
-      .toLowerCase()
-      .replace(/[^a-z0-9]+/g, '-')
-      .replace(/^-|-$/g, '')
-      .substring(0, 50)
-  }
-
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: DeployChatArgs): Promise<void> {
-    const logger = createLogger('DeployChatClientTool')
-    try {
-      const action = args?.action || 'deploy'
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      const workflowId = args?.workflowId || activeWorkflowId
-
-      if (!workflowId) {
-        throw new Error('No workflow ID provided')
-      }
-
-      const workflow = workflows[workflowId]
-
-      // Handle undeploy action
-      if (action === 'undeploy') {
-        this.setState(ClientToolCallState.executing)
-
-        // First get the chat deployment ID
-        const statusRes = await fetch(`/api/workflows/${workflowId}/chat/status`)
-        if (!statusRes.ok) {
-          this.setState(ClientToolCallState.error)
-          await this.markToolComplete(500, 'Failed to check chat deployment status', {
-            success: false,
-            action: 'undeploy',
-            isDeployed: false,
-            error: 'Failed to check chat deployment status',
-            errorCode: 'SERVER_ERROR',
-          })
-          return
-        }
-
-        const statusJson = await statusRes.json()
-        if (!statusJson.isDeployed || !statusJson.deployment?.id) {
-          this.setState(ClientToolCallState.error)
-          await this.markToolComplete(400, 'No active chat deployment found for this workflow', {
-            success: false,
-            action: 'undeploy',
-            isDeployed: false,
-            error: 'No active chat deployment found for this workflow',
-            errorCode: 'VALIDATION_ERROR',
-          })
-          return
-        }
-
-        const chatId = statusJson.deployment.id
-
-        // Delete the chat deployment
-        const res = await fetch(`/api/chat/manage/${chatId}`, {
-          method: 'DELETE',
-          headers: { 'Content-Type': 'application/json' },
-        })
-
-        if (!res.ok) {
-          const txt = await res.text().catch(() => '')
-          this.setState(ClientToolCallState.error)
-          await this.markToolComplete(res.status, txt || `Server error (${res.status})`, {
-            success: false,
-            action: 'undeploy',
-            isDeployed: true,
-            error: txt || 'Failed to undeploy chat',
-            errorCode: 'SERVER_ERROR',
-          })
-          return
-        }
-
-        this.setState(ClientToolCallState.success)
-        await this.markToolComplete(200, 'Chat deployment removed successfully.', {
-          success: true,
-          action: 'undeploy',
-          isDeployed: false,
-        })
-        return
-      }
-
-      this.setState(ClientToolCallState.executing)
-
-      const statusRes = await fetch(`/api/workflows/${workflowId}/chat/status`)
-      const statusJson = statusRes.ok ? await statusRes.json() : null
-      const existingDeployment = statusJson?.deployment || null
-
-      const baseIdentifier =
-        existingDeployment?.identifier || this.generateIdentifier(workflow?.name || 'chat')
-      const baseTitle = existingDeployment?.title || workflow?.name || 'Chat'
-      const baseDescription = existingDeployment?.description || ''
-      const baseAuthType = existingDeployment?.authType || 'public'
-      const baseWelcomeMessage =
-        existingDeployment?.customizations?.welcomeMessage || 'Hi there! How can I help you today?'
-      const basePrimaryColor =
-        existingDeployment?.customizations?.primaryColor || 'var(--brand-primary-hover-hex)'
-      const baseAllowedEmails = Array.isArray(existingDeployment?.allowedEmails)
-        ? existingDeployment.allowedEmails
-        : []
-      const baseOutputConfigs = Array.isArray(existingDeployment?.outputConfigs)
-        ? existingDeployment.outputConfigs
-        : []
-
-      const identifier = args?.identifier || baseIdentifier
-      const title = args?.title || baseTitle
-      const description = args?.description ?? baseDescription
-      const authType = args?.authType || baseAuthType
-      const welcomeMessage = args?.welcomeMessage || baseWelcomeMessage
-      const outputConfigs = args?.outputConfigs || baseOutputConfigs
-      const allowedEmails = args?.allowedEmails || baseAllowedEmails
-      const primaryColor = basePrimaryColor
-
-      if (!identifier || !title) {
-        throw new Error('Chat identifier and title are required')
-      }
-
-      if (authType === 'password' && !args?.password && !existingDeployment?.hasPassword) {
-        throw new Error('Password is required when using password protection')
-      }
-
-      if ((authType === 'email' || authType === 'sso') && allowedEmails.length === 0) {
-        throw new Error(`At least one email or domain is required when using ${authType} access`)
-      }
-
-      const payload = {
-        workflowId,
-        identifier: identifier.trim(),
-        title: title.trim(),
-        description: description.trim(),
-        customizations: {
-          primaryColor,
-          welcomeMessage: welcomeMessage.trim(),
-        },
-        authType,
-        password: authType === 'password' ? args?.password : undefined,
-        allowedEmails: authType === 'email' || authType === 'sso' ? allowedEmails : [],
-        outputConfigs,
-      }
-
-      const isUpdating = Boolean(existingDeployment?.id)
-      const endpoint = isUpdating ? `/api/chat/manage/${existingDeployment.id}` : '/api/chat'
-      const method = isUpdating ? 'PATCH' : 'POST'
-
-      const res = await fetch(endpoint, {
-        method,
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify(payload),
-      })
-
-      const json = await res.json()
-
-      if (!res.ok) {
-        if (json.error === 'Identifier already in use') {
-          this.setState(ClientToolCallState.error)
-          await this.markToolComplete(
-            400,
-            `The identifier "${identifier}" is already in use. Please choose a different one.`,
-            {
-              success: false,
-              action: 'deploy',
-              isDeployed: false,
-              identifier,
-              error: `Identifier "${identifier}" is already taken`,
-              errorCode: 'IDENTIFIER_TAKEN',
-            }
-          )
-          return
-        }
-
-        // Handle validation errors
-        if (json.code === 'VALIDATION_ERROR') {
-          this.setState(ClientToolCallState.error)
-          await this.markToolComplete(400, json.error || 'Validation error', {
-            success: false,
-            action: 'deploy',
-            isDeployed: false,
-            error: json.error,
-            errorCode: 'VALIDATION_ERROR',
-          })
-          return
-        }
-
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(res.status, json.error || 'Failed to deploy chat', {
-          success: false,
-          action: 'deploy',
-          isDeployed: false,
-          error: json.error || 'Server error',
-          errorCode: 'SERVER_ERROR',
-        })
-        return
-      }
-
-      if (!json.chatUrl) {
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(500, 'Response missing chat URL', {
-          success: false,
-          action: 'deploy',
-          isDeployed: false,
-          error: 'Response missing chat URL',
-          errorCode: 'SERVER_ERROR',
-        })
-        return
-      }

-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(
-        200,
-        `Chat deployed successfully! Available at: ${json.chatUrl}`,
-        {
-          success: true,
-          action: 'deploy',
-          isDeployed: true,
-          chatId: json.id,
-          chatUrl: json.chatUrl,
-          identifier,
-          title,
-          authType,
-        }
-      )
-
-      // Update the workflow registry to reflect deployment status
-      // Chat deployment also deploys the API, so we update the registry
-      try {
-        const setDeploymentStatus = useWorkflowRegistry.getState().setDeploymentStatus
-        setDeploymentStatus(workflowId, true, new Date(), '')
-        logger.info('Workflow deployment status updated in registry')
-      } catch (error) {
-        logger.warn('Failed to update workflow registry:', error)
-      }
-
-      logger.info('Chat deployed successfully:', json.chatUrl)
-    } catch (e: any) {
-      logger.error('Deploy chat failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to deploy chat', {
-        success: false,
-        action: 'deploy',
-        isDeployed: false,
-        error: e?.message || 'Failed to deploy chat',
-        errorCode: 'SERVER_ERROR',
-      })
-    }
-  }
-
-  async execute(args?: DeployChatArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
 
 // Register UI config at module load

@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Server, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,7 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
 export interface ParameterDescription {
   name: string
@@ -88,162 +86,8 @@ export class DeployMcpClientTool extends BaseClientTool {
     },
   }
 
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: DeployMcpArgs): Promise<void> {
-    const logger = createLogger('DeployMcpClientTool')
-    try {
-      if (!args?.serverId) {
-        throw new Error(
-          'Server ID is required. Use list_workspace_mcp_servers to get available servers.'
-        )
-      }
-
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      const workflowId = args?.workflowId || activeWorkflowId
-
-      if (!workflowId) {
-        throw new Error('No workflow ID available')
-      }
-
-      const workflow = workflows[workflowId]
-      const workspaceId = workflow?.workspaceId
-
-      if (!workspaceId) {
-        throw new Error('Workflow workspace not found')
-      }
-
-      // Check if workflow is deployed
-      const deploymentStatus = useWorkflowRegistry
-        .getState()
-        .getWorkflowDeploymentStatus(workflowId)
-      if (!deploymentStatus?.isDeployed) {
-        throw new Error(
-          'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.'
-        )
-      }
-
-      this.setState(ClientToolCallState.executing)
-
-      let parameterSchema: Record<string, unknown> | undefined
-      if (args?.parameterDescriptions && args.parameterDescriptions.length > 0) {
-        const properties: Record<string, { description: string }> = {}
-        for (const param of args.parameterDescriptions) {
-          properties[param.name] = { description: param.description }
-        }
-        parameterSchema = { properties }
-      }
-
-      const res = await fetch(
-        `/api/mcp/workflow-servers/${args.serverId}/tools?workspaceId=${workspaceId}`,
-        {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            workflowId,
-            toolName: args.toolName?.trim(),
-            toolDescription: args.toolDescription?.trim(),
-            parameterSchema,
-          }),
-        }
-      )
-
-      const data = await res.json()
-
-      if (!res.ok) {
-        if (data.error?.includes('already added')) {
-          const toolsRes = await fetch(
-            `/api/mcp/workflow-servers/${args.serverId}/tools?workspaceId=${workspaceId}`
-          )
-          const toolsJson = toolsRes.ok ? await toolsRes.json() : null
-          const tools = toolsJson?.data?.tools || []
-          const existingTool = tools.find((tool: any) => tool.workflowId === workflowId)
-          if (!existingTool?.id) {
-            throw new Error('This workflow is already deployed to this MCP server')
-          }
-          const patchRes = await fetch(
-            `/api/mcp/workflow-servers/${args.serverId}/tools/${existingTool.id}?workspaceId=${workspaceId}`,
-            {
-              method: 'PATCH',
-              headers: { 'Content-Type': 'application/json' },
-              body: JSON.stringify({
-                toolName: args.toolName?.trim(),
-                toolDescription: args.toolDescription?.trim(),
-                parameterSchema,
-              }),
-            }
-          )
-          const patchJson = patchRes.ok ? await patchRes.json() : null
-          if (!patchRes.ok) {
-            const patchError = patchJson?.error || `Failed to update MCP tool (${patchRes.status})`
-            throw new Error(patchError)
-          }
-          const updatedTool = patchJson?.data?.tool
-          this.setState(ClientToolCallState.success)
-          await this.markToolComplete(
-            200,
-            `Workflow MCP tool updated to "${updatedTool?.toolName || existingTool.toolName}".`,
-            {
-              success: true,
-              toolId: updatedTool?.id || existingTool.id,
-              toolName: updatedTool?.toolName || existingTool.toolName,
-              toolDescription: updatedTool?.toolDescription || existingTool.toolDescription,
-              serverId: args.serverId,
-              updated: true,
-            }
-          )
-          logger.info('Updated workflow MCP tool', { toolId: existingTool.id })
-          return
-        }
-        if (data.error?.includes('not deployed')) {
-          throw new Error('Workflow must be deployed before adding as an MCP tool')
-        }
-        if (data.error?.includes('Start block')) {
-          throw new Error('Workflow must have a Start block to be used as an MCP tool')
-        }
-        if (data.error?.includes('Server not found')) {
-          throw new Error(
-            'MCP server not found. Use list_workspace_mcp_servers to see available servers.'
-          )
-        }
-        throw new Error(data.error || `Failed to deploy to MCP (${res.status})`)
-      }
-
-      const tool = data.data?.tool
-      if (!tool) {
-        throw new Error('Response missing tool data')
-      }
-
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(
-        200,
-        `Workflow deployed as MCP tool "${tool.toolName}" to server.`,
-        {
-          success: true,
-          toolId: tool.id,
-          toolName: tool.toolName,
-          toolDescription: tool.toolDescription,
-          serverId: args.serverId,
-        }
-      )
-
-      logger.info(`Deployed workflow as MCP tool: ${tool.toolName}`)
-    } catch (e: any) {
-      logger.error('Failed to deploy to MCP', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to deploy to MCP', {
-        success: false,
-        error: e?.message,
-      })
-    }
-  }
-
-  async execute(args?: DeployMcpArgs): Promise<void> {
-    await this.handleAccept(args)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
 
 // Register UI config at module load

@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { Grid2x2, Grid2x2Check, Grid2x2X, Loader2, MinusCircle, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,126 +5,15 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
-import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff'
-import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
-import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { mergeSubblockState } from '@/stores/workflows/utils'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'
-import type { WorkflowState } from '@/stores/workflows/workflow/types'
-
-interface EditWorkflowOperation {
-  operation_type: 'add' | 'edit' | 'delete'
-  block_id: string
-  params?: Record<string, any>
-}
-
-interface EditWorkflowArgs {
-  operations: EditWorkflowOperation[]
-  workflowId: string
-  currentUserWorkflow?: string
-}
 
 export class EditWorkflowClientTool extends BaseClientTool {
   static readonly id = 'edit_workflow'
-  private lastResult: any | undefined
-  private hasExecuted = false
-  private hasAppliedDiff = false
-  private workflowId: string | undefined
 
   constructor(toolCallId: string) {
     super(toolCallId, EditWorkflowClientTool.id, EditWorkflowClientTool.metadata)
   }
 
-  async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
-    const logger = createLogger('EditWorkflowClientTool')
-    logger.info('markToolComplete payload', {
-      toolCallId: this.toolCallId,
-      toolName: this.name,
-      status,
-      message,
-      data,
-    })
-    return super.markToolComplete(status, message, data)
-  }
-
-  /**
-   * Get sanitized workflow JSON from a workflow state, merge subblocks, and sanitize for copilot
-   * This matches what get_user_workflow returns
-   */
-  private getSanitizedWorkflowJson(workflowState: any): string | undefined {
-    const logger = createLogger('EditWorkflowClientTool')
-
-    if (!this.workflowId) {
-      logger.warn('No workflowId available for getting sanitized workflow JSON')
-      return undefined
-    }
-
-    if (!workflowState) {
-      logger.warn('No workflow state provided')
-      return undefined
-    }
-
-    try {
-      // Normalize required properties
-      if (!workflowState.loops) workflowState.loops = {}
-      if (!workflowState.parallels) workflowState.parallels = {}
-      if (!workflowState.edges) workflowState.edges = []
-      if (!workflowState.blocks) workflowState.blocks = {}
-
-      // Merge latest subblock values so edits are reflected
-      let mergedState = workflowState
-      if (workflowState.blocks) {
-        mergedState = {
-          ...workflowState,
-          blocks: mergeSubblockState(workflowState.blocks, this.workflowId as any),
-        }
-        logger.info('Merged subblock values into workflow state', {
-          workflowId: this.workflowId,
-          blockCount: Object.keys(mergedState.blocks || {}).length,
-        })
-      }
-
-      // Sanitize workflow state for copilot (remove UI-specific data)
-      const sanitizedState = sanitizeForCopilot(mergedState)
-
-      // Convert to JSON string for transport
-      const workflowJson = JSON.stringify(sanitizedState, null, 2)
-      logger.info('Successfully created sanitized workflow JSON', {
-        workflowId: this.workflowId,
-        jsonLength: workflowJson.length,
-      })
-
-      return workflowJson
-    } catch (error) {
-      logger.error('Failed to get sanitized workflow JSON', {
-        error: error instanceof Error ? error.message : String(error),
-      })
-      return undefined
-    }
-  }
-
-  /**
-   * Safely get the current workflow JSON sanitized for copilot without throwing.
-   * Used to ensure we always include workflow state in markComplete.
-   */
-  private getCurrentWorkflowJsonSafe(logger: ReturnType<typeof createLogger>): string | undefined {
-    try {
-      const currentState = useWorkflowStore.getState().getWorkflowState()
-      if (!currentState) {
-        logger.warn('No current workflow state available')
-        return undefined
-      }
-      return this.getSanitizedWorkflowJson(currentState)
-    } catch (error) {
-      logger.warn('Failed to get current workflow JSON safely', {
-        error: error instanceof Error ? error.message : String(error),
-      })
-      return undefined
-    }
-  }
-
   static readonly metadata: BaseClientToolMetadata = {
     displayNames: {
       [ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
@@ -168,258 +56,9 @@ export class EditWorkflowClientTool extends BaseClientTool {
     },
   }
 
-  async handleAccept(): Promise<void> {
-    const logger = createLogger('EditWorkflowClientTool')
-    logger.info('handleAccept called', { toolCallId: this.toolCallId, state: this.getState() })
-    // Tool was already marked complete in execute() - this is just for UI state
-    this.setState(ClientToolCallState.success)
-  }
-
-  async handleReject(): Promise<void> {
-    const logger = createLogger('EditWorkflowClientTool')
-    logger.info('handleReject called', { toolCallId: this.toolCallId, state: this.getState() })
-    // Tool was already marked complete in execute() - this is just for UI state
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async execute(args?: EditWorkflowArgs): Promise<void> {
-    const logger = createLogger('EditWorkflowClientTool')
-
-    if (this.hasExecuted) {
-      logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
-      return
-    }
-
-    // Use timeout protection to ensure tool always completes
-    await this.executeWithTimeout(async () => {
-      this.hasExecuted = true
-      logger.info('execute called', { toolCallId: this.toolCallId, argsProvided: !!args })
-      this.setState(ClientToolCallState.executing)
-
-      // Resolve workflowId
-      let workflowId = args?.workflowId
-      if (!workflowId) {
-        const { activeWorkflowId } = useWorkflowRegistry.getState()
-        workflowId = activeWorkflowId as any
-      }
-      if (!workflowId) {
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(400, 'No active workflow found')
-        return
-      }
-
-      // Store workflowId for later use
-      this.workflowId = workflowId
-
-      // Validate operations
-      const operations = args?.operations || []
-      if (!operations.length) {
-        this.setState(ClientToolCallState.error)
-        const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
-        await this.markToolComplete(
-          400,
-          'No operations provided for edit_workflow',
-          currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
-        )
-        return
-      }
-
-      // Prepare currentUserWorkflow JSON from stores to preserve block IDs
-      let currentUserWorkflow = args?.currentUserWorkflow
-
-      if (!currentUserWorkflow) {
-        try {
-          const workflowStore = useWorkflowStore.getState()
-          const fullState = workflowStore.getWorkflowState()
-          const mergedBlocks = mergeSubblockState(fullState.blocks, workflowId as any)
-          const payloadState = stripWorkflowDiffMarkers({
-            ...fullState,
-            blocks: mergedBlocks,
-            edges: fullState.edges || [],
-            loops: fullState.loops || {},
-            parallels: fullState.parallels || {},
-          })
-          currentUserWorkflow = JSON.stringify(payloadState)
-        } catch (error) {
-          logger.warn('Failed to build currentUserWorkflow from stores; proceeding without it', {
-            error,
-          })
-        }
-      }
-
-      // Fetch with AbortController for timeout support
-      const controller = new AbortController()
-      const fetchTimeout = setTimeout(() => controller.abort(), 60000) // 60s fetch timeout
-
-      try {
-        const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-          method: 'POST',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({
-            toolName: 'edit_workflow',
-            payload: {
-              operations,
-              workflowId,
-              ...(currentUserWorkflow ? { currentUserWorkflow } : {}),
-            },
-          }),
-          signal: controller.signal,
-        })
-
-        clearTimeout(fetchTimeout)
-
-        if (!res.ok) {
-          const errorText = await res.text().catch(() => '')
-          let errorMessage: string
-          try {
-            const errorJson = JSON.parse(errorText)
-            errorMessage = errorJson.error || errorText || `Server error (${res.status})`
-          } catch {
-            errorMessage = errorText || `Server error (${res.status})`
-          }
-          // Mark complete with error but include current workflow state
-          this.setState(ClientToolCallState.error)
-          const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
-          await this.markToolComplete(
-            res.status,
-            errorMessage,
-            currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
-          )
-          return
-        }
-
-        const json = await res.json()
-        const parsed = ExecuteResponseSuccessSchema.parse(json)
-        const result = parsed.result as any
-        this.lastResult = result
-        logger.info('server result parsed', {
-          hasWorkflowState: !!result?.workflowState,
-          blocksCount: result?.workflowState
-            ? Object.keys(result.workflowState.blocks || {}).length
-            : 0,
-          hasSkippedItems: !!result?.skippedItems,
-          skippedItemsCount: result?.skippedItems?.length || 0,
-          hasInputValidationErrors: !!result?.inputValidationErrors,
-          inputValidationErrorsCount: result?.inputValidationErrors?.length || 0,
-        })
-
-        // Log skipped items and validation errors for visibility
-        if (result?.skippedItems?.length > 0) {
-          logger.warn('Some operations were skipped during edit_workflow', {
-            skippedItems: result.skippedItems,
-          })
-        }
-        if (result?.inputValidationErrors?.length > 0) {
-          logger.warn('Some inputs were rejected during edit_workflow', {
-            inputValidationErrors: result.inputValidationErrors,
-          })
-        }
-
-        // Update diff directly with workflow state - no YAML conversion needed!
-        if (!result.workflowState) {
-          this.setState(ClientToolCallState.error)
-          const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
-          await this.markToolComplete(
-            500,
-            'No workflow state returned from server',
-            currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
-          )
-          return
-        }
-
-        let actualDiffWorkflow: WorkflowState | null = null
-
-        if (!this.hasAppliedDiff) {
-          const diffStore = useWorkflowDiffStore.getState()
-          // setProposedChanges applies the state optimistically to the workflow store
-          await diffStore.setProposedChanges(result.workflowState)
-          logger.info('diff proposed changes set for edit_workflow with direct workflow state')
-          this.hasAppliedDiff = true
-        }
-
-        // Read back the applied state from the workflow store
-        const workflowStore = useWorkflowStore.getState()
-        actualDiffWorkflow = workflowStore.getWorkflowState()
-
-        if (!actualDiffWorkflow) {
-          this.setState(ClientToolCallState.error)
-          const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
-          await this.markToolComplete(
-            500,
-            'Failed to retrieve workflow state after applying changes',
-            currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
-          )
-          return
-        }
-
-        // Get the workflow state that was just applied, merge subblocks, and sanitize
-        // This matches what get_user_workflow would return (the true state after edits were applied)
-        let workflowJson = this.getSanitizedWorkflowJson(actualDiffWorkflow)
-
-        // Fallback: try to get current workflow state if sanitization failed
-        if (!workflowJson) {
-          workflowJson = this.getCurrentWorkflowJsonSafe(logger)
-        }
-
-        // userWorkflow must always be present on success - log error if missing
-        if (!workflowJson) {
-          logger.error('Failed to get workflow JSON on success path - this should not happen', {
-            toolCallId: this.toolCallId,
-            workflowId: this.workflowId,
-          })
-        }
-
-        // Build sanitized data including workflow JSON and any skipped/validation info
-        // Always include userWorkflow on success paths
-        const sanitizedData: Record<string, any> = {
-          userWorkflow: workflowJson ?? '{}', // Fallback to empty object JSON if all else fails
-        }
-
-        // Include skipped items and validation errors in the response for LLM feedback
-        if (result?.skippedItems?.length > 0) {
-          sanitizedData.skippedItems = result.skippedItems
-          sanitizedData.skippedItemsMessage = result.skippedItemsMessage
-        }
-        if (result?.inputValidationErrors?.length > 0) {
-          sanitizedData.inputValidationErrors = result.inputValidationErrors
-          sanitizedData.inputValidationMessage = result.inputValidationMessage
-        }
-
-        // Build a message that includes info about skipped items
-        let completeMessage = 'Workflow diff ready for review'
-        if (result?.skippedItems?.length > 0 || result?.inputValidationErrors?.length > 0) {
-          const parts: string[] = []
-          if (result?.skippedItems?.length > 0) {
-            parts.push(`${result.skippedItems.length} operation(s) skipped`)
-          }
-          if (result?.inputValidationErrors?.length > 0) {
-            parts.push(`${result.inputValidationErrors.length} input(s) rejected`)
-          }
-          completeMessage = `Workflow diff ready for review. Note: ${parts.join(', ')}.`
-        }
-
-        // Mark complete early to unblock LLM stream - sanitizedData always has userWorkflow
-        await this.markToolComplete(200, completeMessage, sanitizedData)
-
-        // Move into review state
-        this.setState(ClientToolCallState.review, { result })
-      } catch (fetchError: any) {
-        clearTimeout(fetchTimeout)
-        // Handle error with current workflow state
-        this.setState(ClientToolCallState.error)
-        const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
-        const errorMessage =
-          fetchError.name === 'AbortError'
-            ? 'Server request timed out'
-            : fetchError.message || String(fetchError)
-        await this.markToolComplete(
-          500,
-          errorMessage,
-          currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
-        )
-      }
-    })
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only for rendering tool call cards
+  // The server applies workflow changes directly in headless mode
 }
 
 // Register UI config at module load

@@ -1,29 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Tag, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  computeBlockOutputPaths,
-  formatOutputsWithPrefix,
-  getSubflowInsidePaths,
-  getWorkflowSubBlockValues,
-  getWorkflowVariables,
-} from '@/lib/copilot/tools/client/workflow/block-output-utils'
-import {
-  GetBlockOutputsResult,
-  type GetBlockOutputsResultType,
-} from '@/lib/copilot/tools/shared/schemas'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'
-
-const logger = createLogger('GetBlockOutputsClientTool')
-
-interface GetBlockOutputsArgs {
-  blockIds?: string[]
-}
 
 export class GetBlockOutputsClientTool extends BaseClientTool {
   static readonly id = 'get_block_outputs'
@@ -61,84 +41,6 @@ export class GetBlockOutputsClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetBlockOutputsArgs): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const { activeWorkflowId } = useWorkflowRegistry.getState()
-      if (!activeWorkflowId) {
-        await this.markToolComplete(400, 'No active workflow found')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const workflowStore = useWorkflowStore.getState()
-      const blocks = workflowStore.blocks || {}
-      const loops = workflowStore.loops || {}
-      const parallels = workflowStore.parallels || {}
-      const subBlockValues = getWorkflowSubBlockValues(activeWorkflowId)
-
-      const ctx = { workflowId: activeWorkflowId, blocks, loops, parallels, subBlockValues }
-      const targetBlockIds =
-        args?.blockIds && args.blockIds.length > 0 ? args.blockIds : Object.keys(blocks)
-
-      const blockOutputs: GetBlockOutputsResultType['blocks'] = []
-
-      for (const blockId of targetBlockIds) {
-        const block = blocks[blockId]
-        if (!block?.type) continue
-
-        const blockName = block.name || block.type
-
-        const blockOutput: GetBlockOutputsResultType['blocks'][0] = {
-          blockId,
-          blockName,
-          blockType: block.type,
-          outputs: [],
-        }
-
-        // Include triggerMode if the block is in trigger mode
-        if (block.triggerMode) {
-          blockOutput.triggerMode = true
-        }
-
-        if (block.type === 'loop' || block.type === 'parallel') {
-          const insidePaths = getSubflowInsidePaths(block.type, blockId, loops, parallels)
-          blockOutput.insideSubflowOutputs = formatOutputsWithPrefix(insidePaths, blockName)
-          blockOutput.outsideSubflowOutputs = formatOutputsWithPrefix(['results'], blockName)
-        } else {
-          const outputPaths = computeBlockOutputPaths(block, ctx)
-          blockOutput.outputs = formatOutputsWithPrefix(outputPaths, blockName)
-        }
-
-        blockOutputs.push(blockOutput)
-      }
-
-      const includeVariables = !args?.blockIds || args.blockIds.length === 0
-      const resultData: {
-        blocks: typeof blockOutputs
-        variables?: ReturnType<typeof getWorkflowVariables>
-      } = {
-        blocks: blockOutputs,
-      }
-      if (includeVariables) {
-        resultData.variables = getWorkflowVariables(activeWorkflowId)
-      }
-
-      const result = GetBlockOutputsResult.parse(resultData)
-
-      logger.info('Retrieved block outputs', {
-        blockCount: blockOutputs.length,
-        variableCount: resultData.variables?.length ?? 0,
-      })
-
-      await this.markToolComplete(200, 'Retrieved block outputs', result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Error in tool execution', { toolCallId: this.toolCallId, error, message })
-      await this.markToolComplete(500, message || 'Failed to get block outputs')
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
|
|||||||
@@ -1,32 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { GitBranch, Loader2, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import {
-  computeBlockOutputPaths,
-  formatOutputsWithPrefix,
-  getSubflowInsidePaths,
-  getWorkflowSubBlockValues,
-  getWorkflowVariables,
-} from '@/lib/copilot/tools/client/workflow/block-output-utils'
-import {
-  GetBlockUpstreamReferencesResult,
-  type GetBlockUpstreamReferencesResultType,
-} from '@/lib/copilot/tools/shared/schemas'
-import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
-import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'
-import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
-
-const logger = createLogger('GetBlockUpstreamReferencesClientTool')
-
-interface GetBlockUpstreamReferencesArgs {
-  blockIds: string[]
-}
 
 export class GetBlockUpstreamReferencesClientTool extends BaseClientTool {
   static readonly id = 'get_block_upstream_references'
@@ -68,164 +45,6 @@ export class GetBlockUpstreamReferencesClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetBlockUpstreamReferencesArgs): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      if (!args?.blockIds || args.blockIds.length === 0) {
-        await this.markToolComplete(400, 'blockIds array is required')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const { activeWorkflowId } = useWorkflowRegistry.getState()
-      if (!activeWorkflowId) {
-        await this.markToolComplete(400, 'No active workflow found')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const workflowStore = useWorkflowStore.getState()
-      const blocks = workflowStore.blocks || {}
-      const edges = workflowStore.edges || []
-      const loops = workflowStore.loops || {}
-      const parallels = workflowStore.parallels || {}
-      const subBlockValues = getWorkflowSubBlockValues(activeWorkflowId)
-
-      const ctx = { workflowId: activeWorkflowId, blocks, loops, parallels, subBlockValues }
-      const variableOutputs = getWorkflowVariables(activeWorkflowId)
-      const graphEdges = edges.map((edge) => ({ source: edge.source, target: edge.target }))
-
-      const results: GetBlockUpstreamReferencesResultType['results'] = []
-
-      for (const blockId of args.blockIds) {
-        const targetBlock = blocks[blockId]
-        if (!targetBlock) {
-          logger.warn(`Block ${blockId} not found`)
-          continue
-        }
-
-        const insideSubflows: { blockId: string; blockName: string; blockType: string }[] = []
-        const containingLoopIds = new Set<string>()
-        const containingParallelIds = new Set<string>()
-
-        Object.values(loops as Record<string, Loop>).forEach((loop) => {
-          if (loop?.nodes?.includes(blockId)) {
-            containingLoopIds.add(loop.id)
-            const loopBlock = blocks[loop.id]
-            if (loopBlock) {
-              insideSubflows.push({
-                blockId: loop.id,
-                blockName: loopBlock.name || loopBlock.type,
-                blockType: 'loop',
-              })
-            }
-          }
-        })
-
-        Object.values(parallels as Record<string, Parallel>).forEach((parallel) => {
-          if (parallel?.nodes?.includes(blockId)) {
-            containingParallelIds.add(parallel.id)
-            const parallelBlock = blocks[parallel.id]
-            if (parallelBlock) {
-              insideSubflows.push({
-                blockId: parallel.id,
-                blockName: parallelBlock.name || parallelBlock.type,
-                blockType: 'parallel',
-              })
-            }
-          }
-        })
-
-        const ancestorIds = BlockPathCalculator.findAllPathNodes(graphEdges, blockId)
-        const accessibleIds = new Set<string>(ancestorIds)
-        accessibleIds.add(blockId)
-
-        const starterBlock = Object.values(blocks).find((b) => isInputDefinitionTrigger(b.type))
-        if (starterBlock && ancestorIds.includes(starterBlock.id)) {
-          accessibleIds.add(starterBlock.id)
-        }
-
-        containingLoopIds.forEach((loopId) => {
-          accessibleIds.add(loopId)
-          loops[loopId]?.nodes?.forEach((nodeId) => accessibleIds.add(nodeId))
-        })
-
-        containingParallelIds.forEach((parallelId) => {
-          accessibleIds.add(parallelId)
-          parallels[parallelId]?.nodes?.forEach((nodeId) => accessibleIds.add(nodeId))
-        })
-
-        const accessibleBlocks: GetBlockUpstreamReferencesResultType['results'][0]['accessibleBlocks'] =
-          []
-
-        for (const accessibleBlockId of accessibleIds) {
-          const block = blocks[accessibleBlockId]
-          if (!block?.type) continue
-
-          const canSelfReference = block.type === 'approval' || block.type === 'human_in_the_loop'
-          if (accessibleBlockId === blockId && !canSelfReference) continue
-
-          const blockName = block.name || block.type
-          let accessContext: 'inside' | 'outside' | undefined
-          let outputPaths: string[]
-
-          if (block.type === 'loop' || block.type === 'parallel') {
-            const isInside =
-              (block.type === 'loop' && containingLoopIds.has(accessibleBlockId)) ||
-              (block.type === 'parallel' && containingParallelIds.has(accessibleBlockId))
-
-            accessContext = isInside ? 'inside' : 'outside'
-            outputPaths = isInside
-              ? getSubflowInsidePaths(block.type, accessibleBlockId, loops, parallels)
-              : ['results']
-          } else {
-            outputPaths = computeBlockOutputPaths(block, ctx)
-          }
-
-          const formattedOutputs = formatOutputsWithPrefix(outputPaths, blockName)
-
-          const entry: GetBlockUpstreamReferencesResultType['results'][0]['accessibleBlocks'][0] = {
-            blockId: accessibleBlockId,
-            blockName,
-            blockType: block.type,
-            outputs: formattedOutputs,
-          }
-
-          // Include triggerMode if the block is in trigger mode
-          if (block.triggerMode) {
-            entry.triggerMode = true
-          }
-
-          if (accessContext) entry.accessContext = accessContext
-          accessibleBlocks.push(entry)
-        }
-
-        const resultEntry: GetBlockUpstreamReferencesResultType['results'][0] = {
-          blockId,
-          blockName: targetBlock.name || targetBlock.type,
-          accessibleBlocks,
-          variables: variableOutputs,
-        }
-
-        if (insideSubflows.length > 0) resultEntry.insideSubflows = insideSubflows
-        results.push(resultEntry)
-      }
-
-      const result = GetBlockUpstreamReferencesResult.parse({ results })
-
-      logger.info('Retrieved upstream references', {
-        blockIds: args.blockIds,
-        resultCount: results.length,
-      })
-
-      await this.markToolComplete(200, 'Retrieved upstream references', result)
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Error in tool execution', { toolCallId: this.toolCallId, error, message })
-      await this.markToolComplete(500, message || 'Failed to get upstream references')
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,22 +1,10 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Workflow as WorkflowIcon, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { stripWorkflowDiffMarkers } from '@/lib/workflows/diff'
-import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-import { mergeSubblockState } from '@/stores/workflows/utils'
-import { useWorkflowStore } from '@/stores/workflows/workflow/store'
-
-interface GetUserWorkflowArgs {
-  workflowId?: string
-  includeMetadata?: boolean
-}
-
-const logger = createLogger('GetUserWorkflowClientTool')
 
 export class GetUserWorkflowClientTool extends BaseClientTool {
   static readonly id = 'get_user_workflow'
@@ -60,128 +48,6 @@ export class GetUserWorkflowClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetUserWorkflowArgs): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      // Determine workflow ID (explicit or active)
-      let workflowId = args?.workflowId
-      if (!workflowId) {
-        const { activeWorkflowId } = useWorkflowRegistry.getState()
-        if (!activeWorkflowId) {
-          await this.markToolComplete(400, 'No active workflow found')
-          this.setState(ClientToolCallState.error)
-          return
-        }
-        workflowId = activeWorkflowId as any
-      }
-
-      logger.info('Fetching user workflow from stores', {
-        workflowId,
-        includeMetadata: args?.includeMetadata,
-      })
-
-      // Always use main workflow store as the source of truth
-      const workflowStore = useWorkflowStore.getState()
-      const fullWorkflowState = workflowStore.getWorkflowState()
-
-      let workflowState: any = null
-
-      if (!fullWorkflowState || !fullWorkflowState.blocks) {
-        const workflowRegistry = useWorkflowRegistry.getState()
-        const wfKey = String(workflowId)
-        const workflow = (workflowRegistry as any).workflows?.[wfKey]
-
-        if (!workflow) {
-          await this.markToolComplete(404, `Workflow ${workflowId} not found in any store`)
-          this.setState(ClientToolCallState.error)
-          return
-        }
-
-        logger.warn('No workflow state found, using workflow metadata only', { workflowId })
-        workflowState = workflow
-      } else {
-        workflowState = stripWorkflowDiffMarkers(fullWorkflowState)
-        logger.info('Using workflow state from workflow store', {
-          workflowId,
-          blockCount: Object.keys(fullWorkflowState.blocks || {}).length,
-        })
-      }
-
-      // Normalize required properties
-      if (workflowState) {
-        if (!workflowState.loops) workflowState.loops = {}
-        if (!workflowState.parallels) workflowState.parallels = {}
-        if (!workflowState.edges) workflowState.edges = []
-        if (!workflowState.blocks) workflowState.blocks = {}
-      }
-
-      // Merge latest subblock values so edits are reflected
-      try {
-        if (workflowState?.blocks) {
-          workflowState = {
-            ...workflowState,
-            blocks: mergeSubblockState(workflowState.blocks, workflowId as any),
-          }
-          logger.info('Merged subblock values into workflow state', {
-            workflowId,
-            blockCount: Object.keys(workflowState.blocks || {}).length,
-          })
-        }
-      } catch (mergeError) {
-        logger.warn('Failed to merge subblock values; proceeding with raw workflow state', {
-          workflowId,
-          error: mergeError instanceof Error ? mergeError.message : String(mergeError),
-        })
-      }
-
-      logger.info('Validating workflow state', {
-        workflowId,
-        hasWorkflowState: !!workflowState,
-        hasBlocks: !!workflowState?.blocks,
-        workflowStateType: typeof workflowState,
-      })
-
-      if (!workflowState || !workflowState.blocks) {
-        await this.markToolComplete(422, 'Workflow state is empty or invalid')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      // Sanitize workflow state for copilot (remove UI-specific data)
-      const sanitizedState = sanitizeForCopilot(workflowState)
-
-      // Convert to JSON string for transport
-      let workflowJson = ''
-      try {
-        workflowJson = JSON.stringify(sanitizedState, null, 2)
-        logger.info('Successfully stringified sanitized workflow state', {
-          workflowId,
-          jsonLength: workflowJson.length,
-        })
-      } catch (stringifyError) {
-        await this.markToolComplete(
-          500,
-          `Failed to convert workflow to JSON: ${
-            stringifyError instanceof Error ? stringifyError.message : 'Unknown error'
-          }`
-        )
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      // Mark complete with data; keep state success for store render
-      await this.markToolComplete(200, 'Workflow analyzed', { userWorkflow: workflowJson })
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      logger.error('Error in tool execution', {
-        toolCallId: this.toolCallId,
-        error,
-        message,
-      })
-      await this.markToolComplete(500, message || 'Failed to fetch workflow')
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,18 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, MinusCircle, TerminalSquare, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-interface GetWorkflowConsoleArgs {
-  workflowId?: string
-  limit?: number
-  includeDetails?: boolean
-}
 
 export class GetWorkflowConsoleClientTool extends BaseClientTool {
   static readonly id = 'get_workflow_console'
@@ -61,52 +52,6 @@ export class GetWorkflowConsoleClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetWorkflowConsoleArgs): Promise<void> {
-    const logger = createLogger('GetWorkflowConsoleClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const params = args || {}
-      let workflowId = params.workflowId
-      if (!workflowId) {
-        const { activeWorkflowId } = useWorkflowRegistry.getState()
-        workflowId = activeWorkflowId || undefined
-      }
-      if (!workflowId) {
-        logger.error('No active workflow found for console fetch')
-        this.setState(ClientToolCallState.error)
-        await this.markToolComplete(400, 'No active workflow found')
-        return
-      }
-
-      const payload = {
-        workflowId,
-        limit: params.limit ?? 3,
-        includeDetails: params.includeDetails ?? true,
-      }
-
-      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
-        method: 'POST',
-        headers: { 'Content-Type': 'application/json' },
-        body: JSON.stringify({ toolName: 'get_workflow_console', payload }),
-      })
-      if (!res.ok) {
-        const text = await res.text().catch(() => '')
-        throw new Error(text || `Server error (${res.status})`)
-      }
-
-      const json = await res.json()
-      const parsed = ExecuteResponseSuccessSchema.parse(json)
-
-      // Mark success and include result data for UI rendering
-      this.setState(ClientToolCallState.success)
-      await this.markToolComplete(200, 'Workflow console fetched', parsed.result)
-      this.setState(ClientToolCallState.success)
-    } catch (e: any) {
-      const message = e instanceof Error ? e.message : String(e)
-      createLogger('GetWorkflowConsoleClientTool').error('execute failed', { message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, message)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,21 +1,13 @@
-import { createLogger } from '@sim/logger'
 import { Database, Loader2, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-const logger = createLogger('GetWorkflowDataClientTool')
 
 /** Data type enum for the get_workflow_data tool */
 export type WorkflowDataType = 'global_variables' | 'custom_tools' | 'mcp_tools' | 'files'
-
-interface GetWorkflowDataArgs {
-  data_type: WorkflowDataType
-}
 
 export class GetWorkflowDataClientTool extends BaseClientTool {
   static readonly id = 'get_workflow_data'
 
@@ -65,205 +57,6 @@ export class GetWorkflowDataClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetWorkflowDataArgs): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const dataType = args?.data_type
-      if (!dataType) {
-        await this.markToolComplete(400, 'Missing data_type parameter')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const { activeWorkflowId, hydration } = useWorkflowRegistry.getState()
-      const activeWorkspaceId = hydration.workspaceId
-
-      switch (dataType) {
-        case 'global_variables':
-          await this.fetchGlobalVariables(activeWorkflowId)
-          break
-        case 'custom_tools':
-          await this.fetchCustomTools(activeWorkspaceId)
-          break
-        case 'mcp_tools':
-          await this.fetchMcpTools(activeWorkspaceId)
-          break
-        case 'files':
-          await this.fetchFiles(activeWorkspaceId)
-          break
-        default:
-          await this.markToolComplete(400, `Unknown data_type: ${dataType}`)
-          this.setState(ClientToolCallState.error)
-          return
-      }
-    } catch (error: unknown) {
-      const message = error instanceof Error ? error.message : String(error)
-      await this.markToolComplete(500, message || 'Failed to fetch workflow data')
-      this.setState(ClientToolCallState.error)
-    }
-  }
-
-  /**
-   * Fetch global workflow variables
-   */
-  private async fetchGlobalVariables(workflowId: string | null): Promise<void> {
-    if (!workflowId) {
-      await this.markToolComplete(400, 'No active workflow found')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const res = await fetch(`/api/workflows/${workflowId}/variables`, { method: 'GET' })
-    if (!res.ok) {
-      const text = await res.text().catch(() => '')
-      await this.markToolComplete(res.status, text || 'Failed to fetch workflow variables')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const json = await res.json()
-    const varsRecord = (json?.data as Record<string, unknown>) || {}
-    const variables = Object.values(varsRecord).map((v: unknown) => {
-      const variable = v as { id?: string; name?: string; value?: unknown }
-      return {
-        id: String(variable?.id || ''),
-        name: String(variable?.name || ''),
-        value: variable?.value,
-      }
-    })
-
-    logger.info('Fetched workflow variables', { count: variables.length })
-    await this.markToolComplete(200, `Found ${variables.length} variable(s)`, { variables })
-    this.setState(ClientToolCallState.success)
-  }
-
-  /**
-   * Fetch custom tools for the workspace
-   */
-  private async fetchCustomTools(workspaceId: string | null): Promise<void> {
-    if (!workspaceId) {
-      await this.markToolComplete(400, 'No active workspace found')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const res = await fetch(`/api/tools/custom?workspaceId=${workspaceId}`, { method: 'GET' })
-    if (!res.ok) {
-      const text = await res.text().catch(() => '')
-      await this.markToolComplete(res.status, text || 'Failed to fetch custom tools')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const json = await res.json()
-    const toolsData = (json?.data as unknown[]) || []
-    const customTools = toolsData.map((tool: unknown) => {
-      const t = tool as {
-        id?: string
-        title?: string
-        schema?: { function?: { name?: string; description?: string; parameters?: unknown } }
-        code?: string
-      }
-      return {
-        id: String(t?.id || ''),
-        title: String(t?.title || ''),
-        functionName: String(t?.schema?.function?.name || ''),
-        description: String(t?.schema?.function?.description || ''),
-        parameters: t?.schema?.function?.parameters,
-      }
-    })
-
-    logger.info('Fetched custom tools', { count: customTools.length })
-    await this.markToolComplete(200, `Found ${customTools.length} custom tool(s)`, { customTools })
-    this.setState(ClientToolCallState.success)
-  }
-
-  /**
-   * Fetch MCP tools for the workspace
-   */
-  private async fetchMcpTools(workspaceId: string | null): Promise<void> {
-    if (!workspaceId) {
-      await this.markToolComplete(400, 'No active workspace found')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const res = await fetch(`/api/mcp/tools/discover?workspaceId=${workspaceId}`, { method: 'GET' })
-    if (!res.ok) {
-      const text = await res.text().catch(() => '')
-      await this.markToolComplete(res.status, text || 'Failed to fetch MCP tools')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const json = await res.json()
-    const toolsData = (json?.data?.tools as unknown[]) || []
-    const mcpTools = toolsData.map((tool: unknown) => {
-      const t = tool as {
-        name?: string
-        serverId?: string
-        serverName?: string
-        description?: string
-        inputSchema?: unknown
-      }
-      return {
-        name: String(t?.name || ''),
-        serverId: String(t?.serverId || ''),
-        serverName: String(t?.serverName || ''),
-        description: String(t?.description || ''),
-        inputSchema: t?.inputSchema,
-      }
-    })
-
-    logger.info('Fetched MCP tools', { count: mcpTools.length })
-    await this.markToolComplete(200, `Found ${mcpTools.length} MCP tool(s)`, { mcpTools })
-    this.setState(ClientToolCallState.success)
-  }
-
-  /**
-   * Fetch workspace files metadata
-   */
-  private async fetchFiles(workspaceId: string | null): Promise<void> {
-    if (!workspaceId) {
-      await this.markToolComplete(400, 'No active workspace found')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const res = await fetch(`/api/workspaces/${workspaceId}/files`, { method: 'GET' })
-    if (!res.ok) {
-      const text = await res.text().catch(() => '')
-      await this.markToolComplete(res.status, text || 'Failed to fetch files')
-      this.setState(ClientToolCallState.error)
-      return
-    }
-
-    const json = await res.json()
-    const filesData = (json?.files as unknown[]) || []
-    const files = filesData.map((file: unknown) => {
-      const f = file as {
-        id?: string
-        name?: string
-        key?: string
-        path?: string
-        size?: number
-        type?: string
-        uploadedAt?: string
-      }
-      return {
-        id: String(f?.id || ''),
-        name: String(f?.name || ''),
-        key: String(f?.key || ''),
-        path: String(f?.path || ''),
-        size: Number(f?.size || 0),
-        type: String(f?.type || ''),
-        uploadedAt: String(f?.uploadedAt || ''),
-      }
-    })
-
-    logger.info('Fetched workspace files', { count: files.length })
-    await this.markToolComplete(200, `Found ${files.length} file(s)`, { files })
-    this.setState(ClientToolCallState.success)
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,18 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { FileText, Loader2, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-const logger = createLogger('GetWorkflowFromNameClientTool')
-
-interface GetWorkflowFromNameArgs {
-  workflow_name: string
-}
 
 export class GetWorkflowFromNameClientTool extends BaseClientTool {
   static readonly id = 'get_workflow_from_name'
@@ -54,66 +45,6 @@ export class GetWorkflowFromNameClientTool extends BaseClientTool {
     },
   }
 
-  async execute(args?: GetWorkflowFromNameArgs): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const workflowName = args?.workflow_name?.trim()
-      if (!workflowName) {
-        await this.markToolComplete(400, 'workflow_name is required')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      // Try to find by name from registry first to get ID
-      const registry = useWorkflowRegistry.getState()
-      const match = Object.values((registry as any).workflows || {}).find(
-        (w: any) =>
-          String(w?.name || '')
-            .trim()
-            .toLowerCase() === workflowName.toLowerCase()
-      ) as any
-
-      if (!match?.id) {
-        await this.markToolComplete(404, `Workflow not found: ${workflowName}`)
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      // Fetch full workflow from API route (normalized tables)
-      const res = await fetch(`/api/workflows/${encodeURIComponent(match.id)}`, { method: 'GET' })
-      if (!res.ok) {
-        const text = await res.text().catch(() => '')
-        await this.markToolComplete(res.status, text || 'Failed to fetch workflow by name')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const json = await res.json()
-      const wf = json?.data
-      if (!wf?.state?.blocks) {
-        await this.markToolComplete(422, 'Workflow state is empty or invalid')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      // Convert state to the same string format as get_user_workflow
-      const workflowState = {
-        blocks: wf.state.blocks || {},
-        edges: wf.state.edges || [],
-        loops: wf.state.loops || {},
-        parallels: wf.state.parallels || {},
-      }
-      // Sanitize workflow state for copilot (remove UI-specific data)
-      const sanitizedState = sanitizeForCopilot(workflowState)
-      const userWorkflow = JSON.stringify(sanitizedState, null, 2)
-
-      await this.markToolComplete(200, `Retrieved workflow ${workflowName}`, { userWorkflow })
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      await this.markToolComplete(500, message || 'Failed to retrieve workflow by name')
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,4 +1,3 @@
-import { createLogger } from '@sim/logger'
 import { ListChecks, Loader2, X, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
@@ -6,8 +5,6 @@ import {
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 
-const logger = createLogger('ListUserWorkflowsClientTool')
-
 export class ListUserWorkflowsClientTool extends BaseClientTool {
   static readonly id = 'list_user_workflows'
 
@@ -27,34 +24,6 @@ export class ListUserWorkflowsClientTool extends BaseClientTool {
     },
   }
 
-  async execute(): Promise<void> {
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      const res = await fetch('/api/workflows', { method: 'GET' })
-      if (!res.ok) {
-        const text = await res.text().catch(() => '')
-        await this.markToolComplete(res.status, text || 'Failed to fetch workflows')
-        this.setState(ClientToolCallState.error)
-        return
-      }
-
-      const json = await res.json()
-      const workflows = Array.isArray(json?.data) ? json.data : []
-      const names = workflows
-        .map((w: any) => (typeof w?.name === 'string' ? w.name : null))
-        .filter((n: string | null) => !!n)
-
-      logger.info('Found workflows', { count: names.length })
-
-      await this.markToolComplete(200, `Found ${names.length} workflow(s)`, {
-        workflow_names: names,
-      })
-      this.setState(ClientToolCallState.success)
-    } catch (error: any) {
-      const message = error instanceof Error ? error.message : String(error)
-      await this.markToolComplete(500, message || 'Failed to list workflows')
-      this.setState(ClientToolCallState.error)
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,23 +1,9 @@
-import { createLogger } from '@sim/logger'
 import { Loader2, Server, XCircle } from 'lucide-react'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-interface ListWorkspaceMcpServersArgs {
-  workspaceId?: string
-}
-
-export interface WorkspaceMcpServer {
-  id: string
-  name: string
-  description: string | null
-  toolCount: number
-  toolNames: string[]
-}
 
 /**
  * List workspace MCP servers tool.
@@ -50,63 +36,6 @@ export class ListWorkspaceMcpServersClientTool extends BaseClientTool {
     interrupt: undefined,
   }
 
-  async execute(args?: ListWorkspaceMcpServersArgs): Promise<void> {
-    const logger = createLogger('ListWorkspaceMcpServersClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-
-      // Get workspace ID from active workflow if not provided
-      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()
-      let workspaceId = args?.workspaceId
-
-      if (!workspaceId && activeWorkflowId) {
-        workspaceId = workflows[activeWorkflowId]?.workspaceId
-      }
-
-      if (!workspaceId) {
-        throw new Error('No workspace ID available')
-      }
-
-      const res = await fetch(`/api/mcp/workflow-servers?workspaceId=${workspaceId}`)
-
-      if (!res.ok) {
-        const data = await res.json().catch(() => ({}))
-        throw new Error(data.error || `Failed to fetch MCP servers (${res.status})`)
-      }
-
-      const data = await res.json()
-      const servers: WorkspaceMcpServer[] = (data.data?.servers || []).map((s: any) => ({
-        id: s.id,
-        name: s.name,
-        description: s.description,
-        toolCount: s.toolCount || 0,
-        toolNames: s.toolNames || [],
-      }))
-
-      this.setState(ClientToolCallState.success)
-
-      if (servers.length === 0) {
-        await this.markToolComplete(
-          200,
-          'No MCP servers found in this workspace. Use create_workspace_mcp_server to create one.',
-          { servers: [], count: 0 }
-        )
-      } else {
-        await this.markToolComplete(
-          200,
-          `Found ${servers.length} MCP server(s) in the workspace.`,
-          {
-            servers,
-            count: servers.length,
-          }
-        )
-      }
-
-      logger.info(`Listed ${servers.length} MCP servers`)
-    } catch (e: any) {
-      logger.error('Failed to list MCP servers', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to list MCP servers')
-    }
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only
 }
@@ -1,57 +1,16 @@
-import { createLogger } from '@sim/logger'
 import { Check, Loader2, Plus, X, XCircle } from 'lucide-react'
-import { client } from '@/lib/auth/auth-client'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
 import { getCustomTool } from '@/hooks/queries/custom-tools'
-import { useCopilotStore } from '@/stores/panel/copilot/store'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-interface CustomToolSchema {
-  type: 'function'
-  function: {
-    name: string
-    description?: string
-    parameters: {
-      type: string
-      properties: Record<string, any>
-      required?: string[]
-    }
-  }
-}
-
-interface ManageCustomToolArgs {
-  operation: 'add' | 'edit' | 'delete' | 'list'
-  toolId?: string
-  schema?: CustomToolSchema
-  code?: string
-}
-
-const API_ENDPOINT = '/api/tools/custom'
-
-async function checkCustomToolsPermission(): Promise<void> {
-  const activeOrgResponse = await client.organization.getFullOrganization()
-  const organizationId = activeOrgResponse.data?.id
-  if (!organizationId) return
-
-  const response = await fetch(`/api/permission-groups/user?organizationId=${organizationId}`)
-  if (!response.ok) return
-
-  const data = await response.json()
-  if (data?.config?.disableCustomTools) {
-    throw new Error('Custom tools are not allowed based on your permission group settings')
-  }
-}
 
 /**
  * Client tool for creating, editing, and deleting custom tools via the copilot.
 */
 export class ManageCustomToolClientTool extends BaseClientTool {
   static readonly id = 'manage_custom_tool'
-  private currentArgs?: ManageCustomToolArgs
 
   constructor(toolCallId: string) {
     super(toolCallId, ManageCustomToolClientTool.id, ManageCustomToolClientTool.metadata)
@@ -148,261 +107,7 @@ export class ManageCustomToolClientTool extends BaseClientTool {
     },
   }
 
-  /**
-   * Gets the tool call args from the copilot store (needed before execute() is called)
-   */
-  private getArgsFromStore(): ManageCustomToolArgs | undefined {
-    try {
-      const { toolCallsById } = useCopilotStore.getState()
-      const toolCall = toolCallsById[this.toolCallId]
-      return (toolCall as any)?.params as ManageCustomToolArgs | undefined
-    } catch {
-      return undefined
-    }
-  }
-
-  /**
-   * Override getInterruptDisplays to only show confirmation for edit and delete operations.
-   * Add operations execute directly without confirmation.
-   */
-  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
-    const args = this.currentArgs || this.getArgsFromStore()
-    const operation = args?.operation
-    if (operation === 'edit' || operation === 'delete') {
-      return this.metadata.interrupt
-    }
-    return undefined
-  }
-
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: ManageCustomToolArgs): Promise<void> {
-    const logger = createLogger('ManageCustomToolClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      await this.executeOperation(args, logger)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to manage custom tool', {
-        success: false,
-        error: e?.message || 'Failed to manage custom tool',
-      })
-    }
-  }
-
-  async execute(args?: ManageCustomToolArgs): Promise<void> {
-    this.currentArgs = args
-    if (args?.operation === 'add' || args?.operation === 'list') {
-      await this.handleAccept(args)
-    }
-  }
-
-  /**
-   * Executes the custom tool operation (add, edit, delete, or list)
-   */
-  private async executeOperation(
-    args: ManageCustomToolArgs | undefined,
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    if (!args?.operation) {
-      throw new Error('Operation is required')
-    }
-
-    await checkCustomToolsPermission()
-
-    const { operation, toolId, schema, code } = args
-
-    const { hydration } = useWorkflowRegistry.getState()
-    const workspaceId = hydration.workspaceId
-    if (!workspaceId) {
-      throw new Error('No active workspace found')
-    }
-
-    logger.info(`Executing custom tool operation: ${operation}`, {
-      operation,
-      toolId,
-      functionName: schema?.function?.name,
-      workspaceId,
-    })
-
-    switch (operation) {
-      case 'add':
-        await this.addCustomTool({ schema, code, workspaceId }, logger)
-        break
-      case 'edit':
-        await this.editCustomTool({ toolId, schema, code, workspaceId }, logger)
-        break
-      case 'delete':
-        await this.deleteCustomTool({ toolId, workspaceId }, logger)
-        break
-      case 'list':
-        await this.markToolComplete(200, 'Listed custom tools')
-        break
-      default:
-        throw new Error(`Unknown operation: ${operation}`)
-    }
-  }
-
-  /**
-   * Creates a new custom tool
-   */
-  private async addCustomTool(
-    params: {
-      schema?: CustomToolSchema
-      code?: string
-      workspaceId: string
-    },
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    const { schema, code, workspaceId } = params
-
-    if (!schema) {
-      throw new Error('Schema is required for adding a custom tool')
-    }
-    if (!code) {
-      throw new Error('Code is required for adding a custom tool')
-    }
-
-    const functionName = schema.function.name
-
-    const response = await fetch(API_ENDPOINT, {
-      method: 'POST',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({
-        tools: [{ title: functionName, schema, code }],
-        workspaceId,
-      }),
-    })
-
-    const data = await response.json()
-
-    if (!response.ok) {
-      throw new Error(data.error || 'Failed to create custom tool')
-    }
-
-    if (!data.data || !Array.isArray(data.data) || data.data.length === 0) {
-      throw new Error('Invalid API response: missing tool data')
-    }
-
-    const createdTool = data.data[0]
-    logger.info(`Created custom tool: ${functionName}`, { toolId: createdTool.id })
-
-    this.setState(ClientToolCallState.success)
-    await this.markToolComplete(200, `Created custom tool "${functionName}"`, {
-      success: true,
-      operation: 'add',
-      toolId: createdTool.id,
-      functionName,
-    })
-  }
-
-  /**
-   * Updates an existing custom tool
-   */
-  private async editCustomTool(
-    params: {
-      toolId?: string
-      schema?: CustomToolSchema
-      code?: string
-      workspaceId: string
-    },
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    const { toolId, schema, code, workspaceId } = params
-
-    if (!toolId) {
-      throw new Error('Tool ID is required for editing a custom tool')
-    }
-
-    if (!schema && !code) {
-      throw new Error('At least one of schema or code must be provided for editing')
-    }
-
-    const existingResponse = await fetch(`${API_ENDPOINT}?workspaceId=${workspaceId}`)
-    const existingData = await existingResponse.json()
-
-    if (!existingResponse.ok) {
-      throw new Error(existingData.error || 'Failed to fetch existing tools')
-    }
-
-    const existingTool = existingData.data?.find((t: any) => t.id === toolId)
-    if (!existingTool) {
-      throw new Error(`Tool with ID ${toolId} not found`)
-    }
-
-    const mergedSchema = schema ?? existingTool.schema
-    const updatedTool = {
-      id: toolId,
-      title: mergedSchema.function.name,
-      schema: mergedSchema,
-      code: code ?? existingTool.code,
-    }
-
-    const response = await fetch(API_ENDPOINT, {
-      method: 'POST',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify({
-        tools: [updatedTool],
-        workspaceId,
-      }),
-    })
-
-    const data = await response.json()
-
-    if (!response.ok) {
-      throw new Error(data.error || 'Failed to update custom tool')
-    }
-
-    const functionName = updatedTool.schema.function.name
-    logger.info(`Updated custom tool: ${functionName}`, { toolId })
-
-    this.setState(ClientToolCallState.success)
-    await this.markToolComplete(200, `Updated custom tool "${functionName}"`, {
-      success: true,
-      operation: 'edit',
-      toolId,
-      functionName,
-    })
-  }
-
-  /**
-   * Deletes a custom tool
-   */
-  private async deleteCustomTool(
-    params: {
-      toolId?: string
-      workspaceId: string
-    },
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    const { toolId, workspaceId } = params
-
-    if (!toolId) {
-      throw new Error('Tool ID is required for deleting a custom tool')
-    }
-
-    const url = `${API_ENDPOINT}?id=${toolId}&workspaceId=${workspaceId}`
-    const response = await fetch(url, {
-      method: 'DELETE',
-    })
-
-    const data = await response.json()
-
-    if (!response.ok) {
-      throw new Error(data.error || 'Failed to delete custom tool')
-    }
-
-    logger.info(`Deleted custom tool: ${toolId}`)
-
-    this.setState(ClientToolCallState.success)
-    await this.markToolComplete(200, `Deleted custom tool`, {
-      success: true,
-      operation: 'delete',
-      toolId,
-    })
-  }
+  // Executed server-side via handleToolCallEvent in stream-handler.ts
+  // Client tool provides UI metadata only for rendering tool call cards
+  // Interrupts (edit/delete operations) are auto-executed in headless mode
 }
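The ManageMcpToolClientTool diff below removes the same confirmation gating that ManageCustomToolClientTool used above: getInterruptDisplays() returned the interrupt metadata only for edit and delete, so add operations ran straight through while destructive changes prompted the user first. A minimal sketch of that gating pattern follows; HypotheticalArgs stands in for the tool-specific args type and is not a name from this repository.

  // Hedged sketch of the confirmation gating being removed in these two tools
  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
    const args: HypotheticalArgs | undefined = this.currentArgs
    // Only edit/delete are destructive enough to require a confirmation card
    return args?.operation === 'edit' || args?.operation === 'delete'
      ? this.metadata.interrupt
      : undefined
  }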
@@ -1,51 +1,15 @@
-import { createLogger } from '@sim/logger'
 import { Check, Loader2, Server, X, XCircle } from 'lucide-react'
-import { client } from '@/lib/auth/auth-client'
 import {
   BaseClientTool,
   type BaseClientToolMetadata,
   ClientToolCallState,
 } from '@/lib/copilot/tools/client/base-tool'
-import { useCopilotStore } from '@/stores/panel/copilot/store'
-import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
-
-interface McpServerConfig {
-  name: string
-  transport: 'streamable-http'
-  url?: string
-  headers?: Record<string, string>
-  timeout?: number
-  enabled?: boolean
-}
-
-interface ManageMcpToolArgs {
-  operation: 'add' | 'edit' | 'delete'
-  serverId?: string
-  config?: McpServerConfig
-}
-
-const API_ENDPOINT = '/api/mcp/servers'
-
-async function checkMcpToolsPermission(): Promise<void> {
-  const activeOrgResponse = await client.organization.getFullOrganization()
-  const organizationId = activeOrgResponse.data?.id
-  if (!organizationId) return
-
-  const response = await fetch(`/api/permission-groups/user?organizationId=${organizationId}`)
-  if (!response.ok) return
-
-  const data = await response.json()
-  if (data?.config?.disableMcpTools) {
-    throw new Error('MCP tools are not allowed based on your permission group settings')
-  }
-}
 
 /**
  * Client tool for creating, editing, and deleting MCP tool servers via the copilot.
 */
 export class ManageMcpToolClientTool extends BaseClientTool {
   static readonly id = 'manage_mcp_tool'
-  private currentArgs?: ManageMcpToolArgs
 
   constructor(toolCallId: string) {
     super(toolCallId, ManageMcpToolClientTool.id, ManageMcpToolClientTool.metadata)
@@ -121,240 +85,7 @@ export class ManageMcpToolClientTool extends BaseClientTool {
     },
   }
 
-  /**
-   * Gets the tool call args from the copilot store (needed before execute() is called)
-   */
-  private getArgsFromStore(): ManageMcpToolArgs | undefined {
-    try {
-      const { toolCallsById } = useCopilotStore.getState()
-      const toolCall = toolCallsById[this.toolCallId]
-      return (toolCall as any)?.params as ManageMcpToolArgs | undefined
-    } catch {
-      return undefined
-    }
-  }
-
-  /**
-   * Override getInterruptDisplays to only show confirmation for edit and delete operations.
-   * Add operations execute directly without confirmation.
-   */
-  getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
-    const args = this.currentArgs || this.getArgsFromStore()
-    const operation = args?.operation
-    if (operation === 'edit' || operation === 'delete') {
-      return this.metadata.interrupt
-    }
-    return undefined
-  }
-
-  async handleReject(): Promise<void> {
-    await super.handleReject()
-    this.setState(ClientToolCallState.rejected)
-  }
-
-  async handleAccept(args?: ManageMcpToolArgs): Promise<void> {
-    const logger = createLogger('ManageMcpToolClientTool')
-    try {
-      this.setState(ClientToolCallState.executing)
-      await this.executeOperation(args, logger)
-    } catch (e: any) {
-      logger.error('execute failed', { message: e?.message })
-      this.setState(ClientToolCallState.error)
-      await this.markToolComplete(500, e?.message || 'Failed to manage MCP tool', {
-        success: false,
-        error: e?.message || 'Failed to manage MCP tool',
-      })
-    }
-  }
-
-  async execute(args?: ManageMcpToolArgs): Promise<void> {
-    this.currentArgs = args
-    if (args?.operation === 'add') {
-      await this.handleAccept(args)
-    }
-  }
-
-  /**
-   * Executes the MCP tool operation (add, edit, or delete)
-   */
-  private async executeOperation(
-    args: ManageMcpToolArgs | undefined,
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    if (!args?.operation) {
-      throw new Error('Operation is required')
-    }
-
-    await checkMcpToolsPermission()
-
-    const { operation, serverId, config } = args
-
-    const { hydration } = useWorkflowRegistry.getState()
-    const workspaceId = hydration.workspaceId
-    if (!workspaceId) {
-      throw new Error('No active workspace found')
-    }
-
-    logger.info(`Executing MCP tool operation: ${operation}`, {
-      operation,
-      serverId,
-      serverName: config?.name,
-      workspaceId,
-    })
-
-    switch (operation) {
-      case 'add':
-        await this.addMcpServer({ config, workspaceId }, logger)
-        break
-      case 'edit':
-        await this.editMcpServer({ serverId, config, workspaceId }, logger)
-        break
-      case 'delete':
-        await this.deleteMcpServer({ serverId, workspaceId }, logger)
-        break
-      default:
-        throw new Error(`Unknown operation: ${operation}`)
-    }
-  }
-
-  /**
-   * Creates a new MCP server
-   */
-  private async addMcpServer(
-    params: {
-      config?: McpServerConfig
-      workspaceId: string
-    },
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    const { config, workspaceId } = params
-
-    if (!config) {
-      throw new Error('Config is required for adding an MCP tool')
-    }
-    if (!config.name) {
-      throw new Error('Server name is required')
-    }
-    if (!config.url) {
-      throw new Error('Server URL is required for streamable-http transport')
-    }
-
-    const serverData = {
-      ...config,
-      workspaceId,
-      transport: config.transport || 'streamable-http',
-      timeout: config.timeout || 30000,
-      enabled: config.enabled !== false,
-    }
-
-    const response = await fetch(API_ENDPOINT, {
-      method: 'POST',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify(serverData),
-    })
-
-    const data = await response.json()
-
-    if (!response.ok) {
-      throw new Error(data.error || 'Failed to create MCP tool')
-    }
-
-    const serverId = data.data?.serverId
-    logger.info(`Created MCP tool: ${config.name}`, { serverId })
-
-    this.setState(ClientToolCallState.success)
-    await this.markToolComplete(200, `Created MCP tool "${config.name}"`, {
-      success: true,
-      operation: 'add',
-      serverId,
-      serverName: config.name,
-    })
-  }
-
-  /**
-   * Updates an existing MCP server
-   */
-  private async editMcpServer(
-    params: {
-      serverId?: string
-      config?: McpServerConfig
-      workspaceId: string
-    },
-    logger: ReturnType<typeof createLogger>
-  ): Promise<void> {
-    const { serverId, config, workspaceId } = params
-
-    if (!serverId) {
-      throw new Error('Server ID is required for editing an MCP tool')
-    }
-
-    if (!config) {
-      throw new Error('Config is required for editing an MCP tool')
-    }
-
-    const updateData = {
-      ...config,
-      workspaceId,
-    }
-
-    const response = await fetch(`${API_ENDPOINT}/${serverId}?workspaceId=${workspaceId}`, {
-      method: 'PATCH',
-      headers: { 'Content-Type': 'application/json' },
-      body: JSON.stringify(updateData),
-    })
-
-    const data = await response.json()
-
-    if (!response.ok) {
-      throw new Error(data.error || 'Failed to update MCP tool')
-    }
-
-    const serverName = config.name || data.data?.server?.name || serverId
-    logger.info(`Updated MCP tool: ${serverName}`, { serverId })
-
-    this.setState(ClientToolCallState.success)
-    await this.markToolComplete(200, `Updated MCP tool "${serverName}"`, {
-      success: true,
-      operation: 'edit',
serverId,
|
|
||||||
serverName,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Deletes an MCP server
|
|
||||||
*/
|
|
||||||
private async deleteMcpServer(
|
|
||||||
params: {
|
|
||||||
serverId?: string
|
|
||||||
workspaceId: string
|
|
||||||
},
|
|
||||||
logger: ReturnType<typeof createLogger>
|
|
||||||
): Promise<void> {
|
|
||||||
const { serverId, workspaceId } = params
|
|
||||||
|
|
||||||
if (!serverId) {
|
|
||||||
throw new Error('Server ID is required for deleting an MCP tool')
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = `${API_ENDPOINT}?serverId=${serverId}&workspaceId=${workspaceId}`
|
|
||||||
const response = await fetch(url, {
|
|
||||||
method: 'DELETE',
|
|
||||||
})
|
|
||||||
|
|
||||||
const data = await response.json()
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(data.error || 'Failed to delete MCP tool')
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`Deleted MCP tool: ${serverId}`)
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(200, `Deleted MCP tool`, {
|
|
||||||
success: true,
|
|
||||||
operation: 'delete',
|
|
||||||
serverId,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,15 +1,12 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Loader2, Rocket, XCircle } from 'lucide-react'
|
import { Loader2, Rocket, XCircle } from 'lucide-react'
|
||||||
import {
|
import {
|
||||||
BaseClientTool,
|
BaseClientTool,
|
||||||
type BaseClientToolMetadata,
|
type BaseClientToolMetadata,
|
||||||
ClientToolCallState,
|
ClientToolCallState,
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
} from '@/lib/copilot/tools/client/base-tool'
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
|
|
||||||
export class RedeployClientTool extends BaseClientTool {
|
export class RedeployClientTool extends BaseClientTool {
|
||||||
static readonly id = 'redeploy'
|
static readonly id = 'redeploy'
|
||||||
private hasExecuted = false
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
constructor(toolCallId: string) {
|
||||||
super(toolCallId, RedeployClientTool.id, RedeployClientTool.metadata)
|
super(toolCallId, RedeployClientTool.id, RedeployClientTool.metadata)
|
||||||
@@ -28,44 +25,6 @@ export class RedeployClientTool extends BaseClientTool {
|
|||||||
interrupt: undefined,
|
interrupt: undefined,
|
||||||
}
|
}
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
// Executed server-side via handleToolCallEvent in stream-handler.ts
|
||||||
const logger = createLogger('RedeployClientTool')
|
// Client tool provides UI metadata only
|
||||||
try {
|
|
||||||
if (this.hasExecuted) {
|
|
||||||
logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
this.hasExecuted = true
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
|
||||||
if (!activeWorkflowId) {
|
|
||||||
throw new Error('No workflow ID provided')
|
|
||||||
}
|
|
||||||
|
|
||||||
const res = await fetch(`/api/workflows/${activeWorkflowId}/deploy`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ deployChatEnabled: false }),
|
|
||||||
})
|
|
||||||
|
|
||||||
const json = await res.json().catch(() => ({}))
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = json?.error || `Server error (${res.status})`
|
|
||||||
throw new Error(errorText)
|
|
||||||
}
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(200, 'Workflow redeployed', {
|
|
||||||
workflowId: activeWorkflowId,
|
|
||||||
deployedAt: json?.deployedAt || null,
|
|
||||||
schedule: json?.schedule,
|
|
||||||
})
|
|
||||||
} catch (error: any) {
|
|
||||||
logger.error('Redeploy failed', { message: error?.message })
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(500, error?.message || 'Failed to redeploy workflow')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,23 +1,12 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Loader2, MinusCircle, Play, XCircle } from 'lucide-react'
|
import { Loader2, MinusCircle, Play, XCircle } from 'lucide-react'
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
|
||||||
import {
|
import {
|
||||||
BaseClientTool,
|
BaseClientTool,
|
||||||
type BaseClientToolMetadata,
|
type BaseClientToolMetadata,
|
||||||
ClientToolCallState,
|
ClientToolCallState,
|
||||||
WORKFLOW_EXECUTION_TIMEOUT_MS,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
} from '@/lib/copilot/tools/client/base-tool'
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
||||||
import { executeWorkflowWithFullLogging } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
|
|
||||||
import { useExecutionStore } from '@/stores/execution'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
|
|
||||||
interface RunWorkflowArgs {
|
|
||||||
workflowId?: string
|
|
||||||
description?: string
|
|
||||||
workflow_input?: Record<string, any>
|
|
||||||
}
|
|
||||||
|
|
||||||
export class RunWorkflowClientTool extends BaseClientTool {
|
export class RunWorkflowClientTool extends BaseClientTool {
|
||||||
static readonly id = 'run_workflow'
|
static readonly id = 'run_workflow'
|
||||||
|
|
||||||
@@ -112,119 +101,9 @@ export class RunWorkflowClientTool extends BaseClientTool {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
async handleReject(): Promise<void> {
|
// Executed server-side via handleToolCallEvent in stream-handler.ts
|
||||||
await super.handleReject()
|
// Client tool provides UI metadata only for rendering tool call cards
|
||||||
this.setState(ClientToolCallState.rejected)
|
// Workflow execution happens entirely on the server
|
||||||
}
|
|
||||||
|
|
||||||
async handleAccept(args?: RunWorkflowArgs): Promise<void> {
|
|
||||||
const logger = createLogger('RunWorkflowClientTool')
|
|
||||||
|
|
||||||
// Use longer timeout for workflow execution (10 minutes)
|
|
||||||
await this.executeWithTimeout(async () => {
|
|
||||||
const params = args || {}
|
|
||||||
logger.debug('handleAccept() called', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
state: this.getState(),
|
|
||||||
hasArgs: !!args,
|
|
||||||
argKeys: args ? Object.keys(args) : [],
|
|
||||||
})
|
|
||||||
|
|
||||||
// prevent concurrent execution
|
|
||||||
const { isExecuting, setIsExecuting } = useExecutionStore.getState()
|
|
||||||
if (isExecuting) {
|
|
||||||
logger.debug('Execution prevented: already executing')
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(
|
|
||||||
409,
|
|
||||||
'The workflow is already in the middle of an execution. Try again later'
|
|
||||||
)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
|
||||||
if (!activeWorkflowId) {
|
|
||||||
logger.debug('Execution prevented: no active workflow')
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(400, 'No active workflow found')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
logger.debug('Using active workflow', { activeWorkflowId })
|
|
||||||
|
|
||||||
const workflowInput = params.workflow_input || undefined
|
|
||||||
if (workflowInput) {
|
|
||||||
logger.debug('Workflow input provided', {
|
|
||||||
inputFields: Object.keys(workflowInput),
|
|
||||||
inputPreview: JSON.stringify(workflowInput).slice(0, 120),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
setIsExecuting(true)
|
|
||||||
logger.debug('Set isExecuting(true) and switching state to executing')
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const executionId = uuidv4()
|
|
||||||
const executionStartTime = new Date().toISOString()
|
|
||||||
logger.debug('Starting workflow execution', {
|
|
||||||
executionStartTime,
|
|
||||||
executionId,
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
})
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = await executeWorkflowWithFullLogging({
|
|
||||||
workflowInput,
|
|
||||||
executionId,
|
|
||||||
})
|
|
||||||
|
|
||||||
// Determine success for both non-streaming and streaming executions
|
|
||||||
let succeeded = true
|
|
||||||
let errorMessage: string | undefined
|
|
||||||
try {
|
|
||||||
if (result && typeof result === 'object' && 'success' in (result as any)) {
|
|
||||||
succeeded = Boolean((result as any).success)
|
|
||||||
if (!succeeded) {
|
|
||||||
errorMessage = (result as any)?.error || (result as any)?.output?.error
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
result &&
|
|
||||||
typeof result === 'object' &&
|
|
||||||
'execution' in (result as any) &&
|
|
||||||
(result as any).execution &&
|
|
||||||
typeof (result as any).execution === 'object'
|
|
||||||
) {
|
|
||||||
succeeded = Boolean((result as any).execution.success)
|
|
||||||
if (!succeeded) {
|
|
||||||
errorMessage =
|
|
||||||
(result as any).execution?.error || (result as any).execution?.output?.error
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
if (succeeded) {
|
|
||||||
logger.debug('Workflow execution finished with success')
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(
|
|
||||||
200,
|
|
||||||
`Workflow execution completed. Started at: ${executionStartTime}`
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
const msg = errorMessage || 'Workflow execution failed'
|
|
||||||
logger.error('Workflow execution finished with failure', { message: msg })
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(500, msg)
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
// Always clean up execution state
|
|
||||||
setIsExecuting(false)
|
|
||||||
}
|
|
||||||
}, WORKFLOW_EXECUTION_TIMEOUT_MS)
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: RunWorkflowArgs): Promise<void> {
|
|
||||||
// For compatibility if execute() is explicitly invoked, route to handleAccept
|
|
||||||
await this.handleAccept(args)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Register UI config at module load
|
// Register UI config at module load
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Loader2, Settings2, X, XCircle } from 'lucide-react'
|
import { Loader2, Settings2, X, XCircle } from 'lucide-react'
|
||||||
import {
|
import {
|
||||||
BaseClientTool,
|
BaseClientTool,
|
||||||
@@ -6,20 +5,6 @@ import {
|
|||||||
ClientToolCallState,
|
ClientToolCallState,
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
} from '@/lib/copilot/tools/client/base-tool'
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
||||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
|
|
||||||
interface OperationItem {
|
|
||||||
operation: 'add' | 'edit' | 'delete'
|
|
||||||
name: string
|
|
||||||
type?: 'plain' | 'number' | 'boolean' | 'array' | 'object'
|
|
||||||
value?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
interface SetGlobalVarsArgs {
|
|
||||||
operations: OperationItem[]
|
|
||||||
workflowId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export class SetGlobalWorkflowVariablesClientTool extends BaseClientTool {
|
export class SetGlobalWorkflowVariablesClientTool extends BaseClientTool {
|
||||||
static readonly id = 'set_global_workflow_variables'
|
static readonly id = 'set_global_workflow_variables'
|
||||||
@@ -105,170 +90,8 @@ export class SetGlobalWorkflowVariablesClientTool extends BaseClientTool {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
async handleReject(): Promise<void> {
|
// Executed server-side via handleToolCallEvent in stream-handler.ts
|
||||||
await super.handleReject()
|
// Client tool provides UI metadata only
|
||||||
this.setState(ClientToolCallState.rejected)
|
|
||||||
}
|
|
||||||
|
|
||||||
async handleAccept(args?: SetGlobalVarsArgs): Promise<void> {
|
|
||||||
const logger = createLogger('SetGlobalWorkflowVariablesClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
const payload: SetGlobalVarsArgs = { ...(args || { operations: [] }) }
|
|
||||||
if (!payload.workflowId) {
|
|
||||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
|
||||||
if (activeWorkflowId) payload.workflowId = activeWorkflowId
|
|
||||||
}
|
|
||||||
if (!payload.workflowId) {
|
|
||||||
throw new Error('No active workflow found')
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fetch current variables so we can construct full array payload
|
|
||||||
const getRes = await fetch(`/api/workflows/${payload.workflowId}/variables`, {
|
|
||||||
method: 'GET',
|
|
||||||
})
|
|
||||||
if (!getRes.ok) {
|
|
||||||
const txt = await getRes.text().catch(() => '')
|
|
||||||
throw new Error(txt || 'Failed to load current variables')
|
|
||||||
}
|
|
||||||
const currentJson = await getRes.json()
|
|
||||||
const currentVarsRecord = (currentJson?.data as Record<string, any>) || {}
|
|
||||||
|
|
||||||
// Helper to convert string -> typed value
|
|
||||||
function coerceValue(
|
|
||||||
value: string | undefined,
|
|
||||||
type?: 'plain' | 'number' | 'boolean' | 'array' | 'object'
|
|
||||||
) {
|
|
||||||
if (value === undefined) return value
|
|
||||||
const t = type || 'plain'
|
|
||||||
try {
|
|
||||||
if (t === 'number') {
|
|
||||||
const n = Number(value)
|
|
||||||
if (Number.isNaN(n)) return value
|
|
||||||
return n
|
|
||||||
}
|
|
||||||
if (t === 'boolean') {
|
|
||||||
const v = String(value).trim().toLowerCase()
|
|
||||||
if (v === 'true') return true
|
|
||||||
if (v === 'false') return false
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
if (t === 'array' || t === 'object') {
|
|
||||||
const parsed = JSON.parse(value)
|
|
||||||
if (t === 'array' && Array.isArray(parsed)) return parsed
|
|
||||||
if (t === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed))
|
|
||||||
return parsed
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
} catch {}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build mutable map by variable name
|
|
||||||
const byName: Record<string, any> = {}
|
|
||||||
Object.values(currentVarsRecord).forEach((v: any) => {
|
|
||||||
if (v && typeof v === 'object' && v.id && v.name) byName[String(v.name)] = v
|
|
||||||
})
|
|
||||||
|
|
||||||
// Apply operations in order
|
|
||||||
for (const op of payload.operations || []) {
|
|
||||||
const key = String(op.name)
|
|
||||||
const nextType = (op.type as any) || byName[key]?.type || 'plain'
|
|
||||||
if (op.operation === 'delete') {
|
|
||||||
delete byName[key]
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
const typedValue = coerceValue(op.value, nextType)
|
|
||||||
if (op.operation === 'add') {
|
|
||||||
byName[key] = {
|
|
||||||
id: crypto.randomUUID(),
|
|
||||||
workflowId: payload.workflowId,
|
|
||||||
name: key,
|
|
||||||
type: nextType,
|
|
||||||
value: typedValue,
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if (op.operation === 'edit') {
|
|
||||||
if (!byName[key]) {
|
|
||||||
// If editing a non-existent variable, create it
|
|
||||||
byName[key] = {
|
|
||||||
id: crypto.randomUUID(),
|
|
||||||
workflowId: payload.workflowId,
|
|
||||||
name: key,
|
|
||||||
type: nextType,
|
|
||||||
value: typedValue,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
byName[key] = {
|
|
||||||
...byName[key],
|
|
||||||
type: nextType,
|
|
||||||
...(op.value !== undefined ? { value: typedValue } : {}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert byName (keyed by name) to record keyed by ID for the API
|
|
||||||
const variablesRecord: Record<string, any> = {}
|
|
||||||
for (const v of Object.values(byName)) {
|
|
||||||
variablesRecord[v.id] = v
|
|
||||||
}
|
|
||||||
|
|
||||||
// POST full variables record to persist
|
|
||||||
const res = await fetch(`/api/workflows/${payload.workflowId}/variables`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ variables: variablesRecord }),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const txt = await res.text().catch(() => '')
|
|
||||||
throw new Error(txt || `Failed to update variables (${res.status})`)
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
|
||||||
if (activeWorkflowId) {
|
|
||||||
// Fetch the updated variables from the API
|
|
||||||
const refreshRes = await fetch(`/api/workflows/${activeWorkflowId}/variables`, {
|
|
||||||
method: 'GET',
|
|
||||||
})
|
|
||||||
|
|
||||||
if (refreshRes.ok) {
|
|
||||||
const refreshJson = await refreshRes.json()
|
|
||||||
const updatedVarsRecord = (refreshJson?.data as Record<string, any>) || {}
|
|
||||||
|
|
||||||
// Update the variables store with the fresh data
|
|
||||||
useVariablesStore.setState((state) => {
|
|
||||||
// Remove old variables for this workflow
|
|
||||||
const withoutWorkflow = Object.fromEntries(
|
|
||||||
Object.entries(state.variables).filter(([, v]) => v.workflowId !== activeWorkflowId)
|
|
||||||
)
|
|
||||||
// Add the updated variables
|
|
||||||
return {
|
|
||||||
variables: { ...withoutWorkflow, ...updatedVarsRecord },
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info('Refreshed variables in store', { workflowId: activeWorkflowId })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (refreshError) {
|
|
||||||
logger.warn('Failed to refresh variables in store', { error: refreshError })
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.markToolComplete(200, 'Workflow variables updated', { variables: byName })
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (e: any) {
|
|
||||||
const message = e instanceof Error ? e.message : String(e)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(500, message || 'Failed to set workflow variables')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: SetGlobalVarsArgs): Promise<void> {
|
|
||||||
await this.handleAccept(args)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Register UI config at module load
|
// Register UI config at module load
|
||||||
|
|||||||
@@ -1,4 +1,12 @@
|
|||||||
export interface BaseServerTool<TArgs = any, TResult = any> {
|
/**
|
||||||
|
* Base interface for server-executed tools.
|
||||||
|
*
|
||||||
|
* @template TArgs - The type of arguments the tool accepts
|
||||||
|
* @template TResult - The type of result the tool returns
|
||||||
|
*/
|
||||||
|
export interface BaseServerTool<TArgs = unknown, TResult = unknown> {
|
||||||
|
/** The canonical name of the tool (must match the registry key) */
|
||||||
name: string
|
name: string
|
||||||
|
/** Execute the tool with the given arguments and context */
|
||||||
execute(args: TArgs, context?: { userId: string }): Promise<TResult>
|
execute(args: TArgs, context?: { userId: string }): Promise<TResult>
|
||||||
}
|
}
|
||||||
|
|||||||
81
apps/sim/lib/copilot/tools/server/context/set-context.ts
Normal file
81
apps/sim/lib/copilot/tools/server/context/set-context.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
/**
|
||||||
|
* Set Context Server Tool
|
||||||
|
*
|
||||||
|
* Allows headless mode sessions to dynamically set the workflow context.
|
||||||
|
* When called, this tool validates that the user has access to the specified
|
||||||
|
* workflow and returns the resolved context (including workspaceId).
|
||||||
|
*
|
||||||
|
* Go copilot should update its internal session state with the returned context
|
||||||
|
* and include it in subsequent tool_call events.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { verifyWorkflowAccess } from '@/lib/copilot/auth/permissions'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
|
||||||
|
const logger = createLogger('SetContextServerTool')
|
||||||
|
|
||||||
|
export interface SetContextParams {
|
||||||
|
/** The workflow ID to set as the current context */
|
||||||
|
workflowId: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SetContextResult {
|
||||||
|
success: boolean
|
||||||
|
/** The resolved execution context - Go should store this and include in tool_call events */
|
||||||
|
executionContext: {
|
||||||
|
workflowId: string
|
||||||
|
workspaceId?: string
|
||||||
|
userId: string
|
||||||
|
}
|
||||||
|
message: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export const setContextServerTool: BaseServerTool<SetContextParams, SetContextResult> = {
|
||||||
|
name: 'set_context',
|
||||||
|
|
||||||
|
async execute(params: SetContextParams, context?: { userId: string }): Promise<SetContextResult> {
|
||||||
|
if (!context?.userId) {
|
||||||
|
logger.error('Unauthorized attempt to set context - no authenticated user')
|
||||||
|
throw new Error('Authentication required')
|
||||||
|
}
|
||||||
|
|
||||||
|
const { workflowId } = params
|
||||||
|
|
||||||
|
if (!workflowId) {
|
||||||
|
throw new Error('workflowId is required')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Setting execution context', {
|
||||||
|
workflowId,
|
||||||
|
userId: context.userId,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Verify the user has access to this workflow
|
||||||
|
const { hasAccess, workspaceId } = await verifyWorkflowAccess(context.userId, workflowId)
|
||||||
|
|
||||||
|
if (!hasAccess) {
|
||||||
|
logger.warn('User does not have access to workflow', {
|
||||||
|
workflowId,
|
||||||
|
userId: context.userId,
|
||||||
|
})
|
||||||
|
throw new Error(`Access denied to workflow ${workflowId}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Context set successfully', {
|
||||||
|
workflowId,
|
||||||
|
workspaceId,
|
||||||
|
userId: context.userId,
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
executionContext: {
|
||||||
|
workflowId,
|
||||||
|
workspaceId,
|
||||||
|
userId: context.userId,
|
||||||
|
},
|
||||||
|
message: `Context set to workflow ${workflowId}${workspaceId ? ` (workspace: ${workspaceId})` : ''}`,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
44
apps/sim/lib/copilot/tools/server/other/checkoff-todo.ts
Normal file
44
apps/sim/lib/copilot/tools/server/other/checkoff-todo.ts
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
|
||||||
|
const logger = createLogger('CheckoffTodoServerTool')
|
||||||
|
|
||||||
|
export const CheckoffTodoInput = z.object({
|
||||||
|
id: z.string().optional(),
|
||||||
|
todoId: z.string().optional(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const CheckoffTodoResult = z.object({
|
||||||
|
todoId: z.string(),
|
||||||
|
success: z.boolean(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type CheckoffTodoInputType = z.infer<typeof CheckoffTodoInput>
|
||||||
|
export type CheckoffTodoResultType = z.infer<typeof CheckoffTodoResult>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Server-side tool to mark a todo as complete.
|
||||||
|
* The actual UI update happens client-side when the store receives the tool_result event.
|
||||||
|
*/
|
||||||
|
export const checkoffTodoServerTool: BaseServerTool<CheckoffTodoInputType, CheckoffTodoResultType> =
|
||||||
|
{
|
||||||
|
name: 'checkoff_todo',
|
||||||
|
async execute(args: unknown, _context?: { userId: string }) {
|
||||||
|
const parsed = CheckoffTodoInput.parse(args)
|
||||||
|
const todoId = parsed.id || parsed.todoId
|
||||||
|
|
||||||
|
if (!todoId) {
|
||||||
|
throw new Error('Missing todo id')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Marking todo as complete', { todoId })
|
||||||
|
|
||||||
|
// The actual state update happens client-side via tool_result handler
|
||||||
|
// We just return success to signal the action was processed
|
||||||
|
return CheckoffTodoResult.parse({
|
||||||
|
todoId,
|
||||||
|
success: true,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
|
||||||
|
const logger = createLogger('MarkTodoInProgressServerTool')
|
||||||
|
|
||||||
|
export const MarkTodoInProgressInput = z.object({
|
||||||
|
id: z.string().optional(),
|
||||||
|
todoId: z.string().optional(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const MarkTodoInProgressResult = z.object({
|
||||||
|
todoId: z.string(),
|
||||||
|
success: z.boolean(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type MarkTodoInProgressInputType = z.infer<typeof MarkTodoInProgressInput>
|
||||||
|
export type MarkTodoInProgressResultType = z.infer<typeof MarkTodoInProgressResult>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Server-side tool to mark a todo as in progress.
|
||||||
|
* The actual UI update happens client-side when the store receives the tool_result event.
|
||||||
|
*/
|
||||||
|
export const markTodoInProgressServerTool: BaseServerTool<
|
||||||
|
MarkTodoInProgressInputType,
|
||||||
|
MarkTodoInProgressResultType
|
||||||
|
> = {
|
||||||
|
name: 'mark_todo_in_progress',
|
||||||
|
async execute(args: unknown, _context?: { userId: string }) {
|
||||||
|
const parsed = MarkTodoInProgressInput.parse(args)
|
||||||
|
const todoId = parsed.id || parsed.todoId
|
||||||
|
|
||||||
|
if (!todoId) {
|
||||||
|
throw new Error('Missing todo id')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Marking todo as in progress', { todoId })
|
||||||
|
|
||||||
|
// The actual state update happens client-side via tool_result handler
|
||||||
|
return MarkTodoInProgressResult.parse({
|
||||||
|
todoId,
|
||||||
|
success: true,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
45
apps/sim/lib/copilot/tools/server/other/sleep.ts
Normal file
45
apps/sim/lib/copilot/tools/server/other/sleep.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
|
||||||
|
const logger = createLogger('SleepServerTool')
|
||||||
|
|
||||||
|
/** Maximum sleep duration in seconds (3 minutes) */
|
||||||
|
const MAX_SLEEP_SECONDS = 180
|
||||||
|
|
||||||
|
export const SleepInput = z.object({
|
||||||
|
seconds: z.number().min(0).max(MAX_SLEEP_SECONDS).optional().default(0),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const SleepResult = z.object({
|
||||||
|
sleptFor: z.number(),
|
||||||
|
success: z.boolean(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type SleepInputType = z.infer<typeof SleepInput>
|
||||||
|
export type SleepResultType = z.infer<typeof SleepResult>
|
||||||
|
|
||||||
|
export const sleepServerTool: BaseServerTool<SleepInputType, SleepResultType> = {
|
||||||
|
name: 'sleep',
|
||||||
|
async execute(args: unknown, _context?: { userId: string }) {
|
||||||
|
const parsed = SleepInput.parse(args)
|
||||||
|
let seconds = parsed.seconds
|
||||||
|
|
||||||
|
// Clamp to max
|
||||||
|
if (seconds > MAX_SLEEP_SECONDS) {
|
||||||
|
seconds = MAX_SLEEP_SECONDS
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Starting sleep', { seconds })
|
||||||
|
|
||||||
|
// Actually sleep
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, seconds * 1000))
|
||||||
|
|
||||||
|
logger.info('Sleep completed', { seconds })
|
||||||
|
|
||||||
|
return SleepResult.parse({
|
||||||
|
sleptFor: seconds,
|
||||||
|
success: true,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -1,66 +1,37 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
/**
|
||||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
* Server Tool Router
|
||||||
import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
|
*
|
||||||
import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
|
* This module provides backwards compatibility for the execute-copilot-server-tool API route.
|
||||||
import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
|
* It delegates to the new unified registry in server-executor/registry.ts
|
||||||
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
|
*
|
||||||
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
|
* @deprecated Use executeRegisteredTool from server-executor/registry.ts directly
|
||||||
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
|
*/
|
||||||
import { knowledgeBaseServerTool } from '@/lib/copilot/tools/server/knowledge/knowledge-base'
|
|
||||||
import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-api-request'
|
|
||||||
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
|
|
||||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
|
||||||
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
|
|
||||||
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
|
|
||||||
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetBlockConfigInput,
|
|
||||||
GetBlockConfigResult,
|
|
||||||
GetBlockOptionsInput,
|
|
||||||
GetBlockOptionsResult,
|
|
||||||
GetBlocksAndToolsInput,
|
|
||||||
GetBlocksAndToolsResult,
|
|
||||||
GetBlocksMetadataInput,
|
|
||||||
GetBlocksMetadataResult,
|
|
||||||
GetTriggerBlocksInput,
|
|
||||||
GetTriggerBlocksResult,
|
|
||||||
KnowledgeBaseArgsSchema,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
|
|
||||||
// Generic execute response schemas (success path only for this route; errors handled via HTTP status)
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { executeRegisteredTool, isServerExecutedTool } from '@/lib/copilot/server-executor/registry'
|
||||||
|
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
|
||||||
|
|
||||||
|
const logger = createLogger('ServerToolRouter')
|
||||||
|
|
||||||
|
// Re-export for backwards compatibility
|
||||||
export { ExecuteResponseSuccessSchema }
|
export { ExecuteResponseSuccessSchema }
|
||||||
export type ExecuteResponseSuccess = (typeof ExecuteResponseSuccessSchema)['_type']
|
export type ExecuteResponseSuccess = (typeof ExecuteResponseSuccessSchema)['_type']
|
||||||
|
|
||||||
// Define server tool registry for the new copilot runtime
|
/**
|
||||||
const serverToolRegistry: Record<string, BaseServerTool<any, any>> = {}
|
* Route execution to the appropriate server tool.
|
||||||
const logger = createLogger('ServerToolRouter')
|
*
|
||||||
|
* @deprecated Use executeRegisteredTool from server-executor/registry.ts directly
|
||||||
// Register tools
|
*/
|
||||||
serverToolRegistry[getBlocksAndToolsServerTool.name] = getBlocksAndToolsServerTool
|
|
||||||
serverToolRegistry[getBlocksMetadataServerTool.name] = getBlocksMetadataServerTool
|
|
||||||
serverToolRegistry[getBlockOptionsServerTool.name] = getBlockOptionsServerTool
|
|
||||||
serverToolRegistry[getBlockConfigServerTool.name] = getBlockConfigServerTool
|
|
||||||
serverToolRegistry[getTriggerBlocksServerTool.name] = getTriggerBlocksServerTool
|
|
||||||
serverToolRegistry[editWorkflowServerTool.name] = editWorkflowServerTool
|
|
||||||
serverToolRegistry[getWorkflowConsoleServerTool.name] = getWorkflowConsoleServerTool
|
|
||||||
serverToolRegistry[searchDocumentationServerTool.name] = searchDocumentationServerTool
|
|
||||||
serverToolRegistry[searchOnlineServerTool.name] = searchOnlineServerTool
|
|
||||||
serverToolRegistry[setEnvironmentVariablesServerTool.name] = setEnvironmentVariablesServerTool
|
|
||||||
serverToolRegistry[getCredentialsServerTool.name] = getCredentialsServerTool
|
|
||||||
serverToolRegistry[makeApiRequestServerTool.name] = makeApiRequestServerTool
|
|
||||||
serverToolRegistry[knowledgeBaseServerTool.name] = knowledgeBaseServerTool
|
|
||||||
|
|
||||||
export async function routeExecution(
|
export async function routeExecution(
|
||||||
toolName: string,
|
toolName: string,
|
||||||
payload: unknown,
|
payload: unknown,
|
||||||
context?: { userId: string }
|
context?: { userId: string }
|
||||||
): Promise<any> {
|
): Promise<unknown> {
|
||||||
const tool = serverToolRegistry[toolName]
|
if (!isServerExecutedTool(toolName)) {
|
||||||
if (!tool) {
|
|
||||||
throw new Error(`Unknown server tool: ${toolName}`)
|
throw new Error(`Unknown server tool: ${toolName}`)
|
||||||
}
|
}
|
||||||
logger.debug('Routing to tool', {
|
|
||||||
|
logger.debug('Routing to tool via unified registry', {
|
||||||
toolName,
|
toolName,
|
||||||
payloadPreview: (() => {
|
payloadPreview: (() => {
|
||||||
try {
|
try {
|
||||||
@@ -71,43 +42,15 @@ export async function routeExecution(
|
|||||||
})(),
|
})(),
|
||||||
})
|
})
|
||||||
|
|
||||||
let args: any = payload || {}
|
const result = await executeRegisteredTool(toolName, payload, {
|
||||||
if (toolName === 'get_blocks_and_tools') {
|
userId: context?.userId ?? '',
|
||||||
args = GetBlocksAndToolsInput.parse(args)
|
})
|
||||||
}
|
|
||||||
if (toolName === 'get_blocks_metadata') {
|
// The old API expected the raw result, not wrapped in ToolResult
|
||||||
args = GetBlocksMetadataInput.parse(args)
|
// For backwards compatibility, unwrap and throw on error
|
||||||
}
|
if (!result.success) {
|
||||||
if (toolName === 'get_block_options') {
|
throw new Error(result.error?.message ?? 'Tool execution failed')
|
||||||
args = GetBlockOptionsInput.parse(args)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_block_config') {
|
|
||||||
args = GetBlockConfigInput.parse(args)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_trigger_blocks') {
|
|
||||||
args = GetTriggerBlocksInput.parse(args)
|
|
||||||
}
|
|
||||||
if (toolName === 'knowledge_base') {
|
|
||||||
args = KnowledgeBaseArgsSchema.parse(args)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const result = await tool.execute(args, context)
|
return result.data
|
||||||
|
|
||||||
if (toolName === 'get_blocks_and_tools') {
|
|
||||||
return GetBlocksAndToolsResult.parse(result)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_blocks_metadata') {
|
|
||||||
return GetBlocksMetadataResult.parse(result)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_block_options') {
|
|
||||||
return GetBlockOptionsResult.parse(result)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_block_config') {
|
|
||||||
return GetBlockConfigResult.parse(result)
|
|
||||||
}
|
|
||||||
if (toolName === 'get_trigger_blocks') {
|
|
||||||
return GetTriggerBlocksResult.parse(result)
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,172 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import {
|
||||||
|
chat,
|
||||||
|
workflow,
|
||||||
|
workflowDeploymentVersion,
|
||||||
|
workflowMcpServer,
|
||||||
|
workflowMcpTool,
|
||||||
|
} from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { and, eq } from 'drizzle-orm'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
|
const logger = createLogger('CheckDeploymentStatusServerTool')
|
||||||
|
|
||||||
|
export const CheckDeploymentStatusInput = z.object({
|
||||||
|
workflowId: z.string(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const CheckDeploymentStatusResult = z.object({
|
||||||
|
isDeployed: z.boolean(),
|
||||||
|
deploymentTypes: z.array(z.string()),
|
||||||
|
api: z.object({
|
||||||
|
isDeployed: z.boolean(),
|
||||||
|
deployedAt: z.string().nullable(),
|
||||||
|
endpoint: z.string().nullable(),
|
||||||
|
}),
|
||||||
|
chat: z.object({
|
||||||
|
isDeployed: z.boolean(),
|
||||||
|
chatId: z.string().nullable(),
|
||||||
|
identifier: z.string().nullable(),
|
||||||
|
chatUrl: z.string().nullable(),
|
||||||
|
title: z.string().nullable(),
|
||||||
|
}),
|
||||||
|
mcp: z.object({
|
||||||
|
isDeployed: z.boolean(),
|
||||||
|
servers: z.array(
|
||||||
|
z.object({
|
||||||
|
serverId: z.string(),
|
||||||
|
serverName: z.string(),
|
||||||
|
toolName: z.string(),
|
||||||
|
})
|
||||||
|
),
|
||||||
|
}),
|
||||||
|
message: z.string(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type CheckDeploymentStatusInputType = z.infer<typeof CheckDeploymentStatusInput>
|
||||||
|
export type CheckDeploymentStatusResultType = z.infer<typeof CheckDeploymentStatusResult>
|
||||||
|
|
||||||
|
export const checkDeploymentStatusServerTool: BaseServerTool<
|
||||||
|
CheckDeploymentStatusInputType,
|
||||||
|
CheckDeploymentStatusResultType
|
||||||
|
> = {
|
||||||
|
name: 'check_deployment_status',
|
||||||
|
async execute(args: unknown, _context?: { userId: string }) {
|
||||||
|
const parsed = CheckDeploymentStatusInput.parse(args)
|
||||||
|
const { workflowId } = parsed
|
||||||
|
|
||||||
|
logger.debug('Checking deployment status', { workflowId })
|
||||||
|
|
||||||
|
// Get workflow to find workspaceId
|
||||||
|
const [wf] = await db
|
||||||
|
.select({ workspaceId: workflow.workspaceId })
|
||||||
|
.from(workflow)
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const workspaceId = wf?.workspaceId
|
||||||
|
|
||||||
|
// Check API deployment (active deployment version)
|
||||||
|
const [apiDeploy] = await db
|
||||||
|
.select({
|
||||||
|
id: workflowDeploymentVersion.id,
|
||||||
|
createdAt: workflowDeploymentVersion.createdAt,
|
||||||
|
})
|
||||||
|
.from(workflowDeploymentVersion)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(workflowDeploymentVersion.workflowId, workflowId),
|
||||||
|
eq(workflowDeploymentVersion.isActive, true)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const isApiDeployed = !!apiDeploy
|
||||||
|
const appUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||||
|
|
||||||
|
// Check chat deployment
|
||||||
|
const [chatDeploy] = await db
|
||||||
|
.select({
|
||||||
|
id: chat.id,
|
||||||
|
identifier: chat.identifier,
|
||||||
|
title: chat.title,
|
||||||
|
})
|
||||||
|
.from(chat)
|
||||||
|
.where(eq(chat.workflowId, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const isChatDeployed = !!chatDeploy
|
||||||
|
|
||||||
|
// Check MCP deployment
|
||||||
|
let mcpToolDeployments: { serverId: string; serverName: string; toolName: string }[] = []
|
||||||
|
if (workspaceId) {
|
||||||
|
const mcpTools = await db
|
||||||
|
.select({
|
||||||
|
toolName: workflowMcpTool.toolName,
|
||||||
|
serverId: workflowMcpTool.serverId,
|
||||||
|
serverName: workflowMcpServer.name,
|
||||||
|
})
|
||||||
|
.from(workflowMcpTool)
|
||||||
|
.innerJoin(workflowMcpServer, eq(workflowMcpTool.serverId, workflowMcpServer.id))
|
||||||
|
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||||
|
|
||||||
|
mcpToolDeployments = mcpTools.map((t) => ({
|
||||||
|
serverId: t.serverId,
|
||||||
|
serverName: t.serverName,
|
||||||
|
toolName: t.toolName,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
const isMcpDeployed = mcpToolDeployments.length > 0
|
||||||
|
|
||||||
|
// Build result
|
||||||
|
const deploymentTypes: string[] = []
|
||||||
|
if (isApiDeployed) deploymentTypes.push('api')
|
||||||
|
if (isChatDeployed) deploymentTypes.push('chat')
|
||||||
|
if (isMcpDeployed) deploymentTypes.push('mcp')
|
||||||
|
|
||||||
|
const isDeployed = isApiDeployed || isChatDeployed || isMcpDeployed
|
||||||
|
|
||||||
|
// Build summary message
|
||||||
|
let message = ''
|
||||||
|
if (!isDeployed) {
|
||||||
|
message = 'Workflow is not deployed'
|
||||||
|
} else {
|
||||||
|
const parts: string[] = []
|
||||||
|
if (isApiDeployed) parts.push('API')
|
||||||
|
if (isChatDeployed) parts.push(`Chat (${chatDeploy?.identifier})`)
|
||||||
|
if (isMcpDeployed) {
|
||||||
|
const serverNames = [...new Set(mcpToolDeployments.map((d) => d.serverName))].join(', ')
|
||||||
|
parts.push(`MCP (${serverNames})`)
|
||||||
|
}
|
||||||
|
message = `Workflow is deployed as: ${parts.join(', ')}`
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('Checked deployment status', { workflowId, isDeployed, deploymentTypes })
|
||||||
|
|
||||||
|
return CheckDeploymentStatusResult.parse({
|
||||||
|
isDeployed,
|
||||||
|
deploymentTypes,
|
||||||
|
api: {
|
||||||
|
isDeployed: isApiDeployed,
|
||||||
|
deployedAt: apiDeploy?.createdAt?.toISOString() || null,
|
||||||
|
endpoint: isApiDeployed ? `${appUrl}/api/workflows/${workflowId}/execute` : null,
|
||||||
|
},
|
||||||
|
chat: {
|
||||||
|
isDeployed: isChatDeployed,
|
||||||
|
chatId: chatDeploy?.id || null,
|
||||||
|
identifier: chatDeploy?.identifier || null,
|
||||||
|
chatUrl: isChatDeployed ? `${appUrl}/chat/${chatDeploy?.identifier}` : null,
|
||||||
|
title: chatDeploy?.title || null,
|
||||||
|
},
|
||||||
|
mcp: {
|
||||||
|
isDeployed: isMcpDeployed,
|
||||||
|
servers: mcpToolDeployments,
|
||||||
|
},
|
||||||
|
message,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -0,0 +1,73 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { workflowMcpServer } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
|
||||||
|
const logger = createLogger('CreateWorkspaceMcpServerServerTool')
|
||||||
|
|
||||||
|
export const CreateWorkspaceMcpServerInput = z.object({
|
||||||
|
name: z.string().min(1),
|
||||||
|
description: z.string().optional(),
|
||||||
|
workspaceId: z.string().min(1),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const CreateWorkspaceMcpServerResult = z.object({
|
||||||
|
success: z.boolean(),
|
||||||
|
serverId: z.string().nullable(),
|
||||||
|
serverName: z.string().nullable(),
|
||||||
|
description: z.string().nullable(),
|
||||||
|
message: z.string(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type CreateWorkspaceMcpServerInputType = z.infer<typeof CreateWorkspaceMcpServerInput>
|
||||||
|
export type CreateWorkspaceMcpServerResultType = z.infer<typeof CreateWorkspaceMcpServerResult>
|
||||||
|
|
||||||
|
export const createWorkspaceMcpServerServerTool: BaseServerTool<
|
||||||
|
CreateWorkspaceMcpServerInputType,
|
||||||
|
CreateWorkspaceMcpServerResultType
|
||||||
|
> = {
|
||||||
|
name: 'create_workspace_mcp_server',
|
||||||
|
async execute(args: unknown, context?: { userId: string }) {
|
||||||
|
const parsed = CreateWorkspaceMcpServerInput.parse(args)
|
||||||
|
const { name, description, workspaceId } = parsed
|
||||||
|
|
||||||
|
if (!context?.userId) {
|
||||||
|
throw new Error('User authentication required')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug('Creating workspace MCP server', { name, workspaceId })
|
||||||
|
|
||||||
|
// Check if server with same name already exists
|
||||||
|
const existing = await db
|
||||||
|
.select({ id: workflowMcpServer.id })
|
||||||
|
.from(workflowMcpServer)
|
||||||
|
.where(eq(workflowMcpServer.workspaceId, workspaceId))
|
||||||
|
.limit(100)
|
||||||
|
|
||||||
|
// Generate unique ID
|
||||||
|
const serverId = crypto.randomUUID()
|
||||||
|
const now = new Date()
|
||||||
|
|
||||||
|
await db.insert(workflowMcpServer).values({
|
||||||
|
id: serverId,
|
||||||
|
workspaceId,
|
||||||
|
createdBy: context.userId,
|
||||||
|
name: name.trim(),
|
||||||
|
description: description?.trim() || null,
|
||||||
|
createdAt: now,
|
||||||
|
updatedAt: now,
|
||||||
|
})
|
||||||
|
|
||||||
|
logger.info('Created MCP server', { serverId, name })
|
||||||
|
|
||||||
|
return CreateWorkspaceMcpServerResult.parse({
|
||||||
|
success: true,
|
||||||
|
serverId,
|
||||||
|
serverName: name.trim(),
|
||||||
|
description: description?.trim() || null,
|
||||||
|
message: `MCP server "${name}" created successfully. You can now deploy workflows to it using deploy_mcp.`,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
152
apps/sim/lib/copilot/tools/server/workflow/deploy-api.ts
Normal file
152
apps/sim/lib/copilot/tools/server/workflow/deploy-api.ts
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { apiKey, workflow, workflowDeploymentVersion } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { desc, eq, or } from 'drizzle-orm'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||||
|
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('DeployApiServerTool')
|
||||||
|
|
||||||
|
export const DeployApiInput = z.object({
|
||||||
|
action: z.enum(['deploy', 'undeploy']).default('deploy'),
|
||||||
|
workflowId: z.string().min(1),
|
||||||
|
})
|
||||||
|
|
||||||
|
export const DeployApiResult = z.object({
|
||||||
|
success: z.boolean(),
|
||||||
|
action: z.string(),
|
||||||
|
isDeployed: z.boolean(),
|
||||||
|
deployedAt: z.string().nullable(),
|
||||||
|
endpoint: z.string().nullable(),
|
||||||
|
curlCommand: z.string().nullable(),
|
||||||
|
message: z.string(),
|
||||||
|
needsApiKey: z.boolean().optional(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export type DeployApiInputType = z.infer<typeof DeployApiInput>
|
||||||
|
export type DeployApiResultType = z.infer<typeof DeployApiResult>
|
||||||
|
|
||||||
|
export const deployApiServerTool: BaseServerTool<DeployApiInputType, DeployApiResultType> = {
|
||||||
|
name: 'deploy_api',
|
||||||
|
async execute(args: unknown, context?: { userId: string }) {
|
||||||
|
const parsed = DeployApiInput.parse(args)
|
||||||
|
const { action, workflowId } = parsed
|
||||||
|
|
||||||
|
if (!context?.userId) {
|
||||||
|
throw new Error('User authentication required')
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug('Deploy API', { action, workflowId })
|
||||||
|
|
||||||
|
// Get workflow info
|
||||||
|
const [wf] = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)
|
||||||
|
|
||||||
|
if (!wf) {
|
||||||
|
throw new Error(`Workflow not found: ${workflowId}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspaceId = wf.workspaceId
|
||||||
|
|
||||||
|
if (action === 'undeploy') {
|
||||||
|
// Deactivate all deployment versions
|
||||||
|
await db
|
||||||
|
.update(workflowDeploymentVersion)
|
||||||
|
.set({ isActive: false })
|
||||||
|
.where(eq(workflowDeploymentVersion.workflowId, workflowId))
|
||||||
|
|
||||||
|
logger.info('Workflow undeployed', { workflowId })
|
||||||
|
|
||||||
|
return DeployApiResult.parse({
|
||||||
|
success: true,
|
||||||
|
action: 'undeploy',
|
||||||
|
isDeployed: false,
|
||||||
|
deployedAt: null,
|
||||||
|
endpoint: null,
|
||||||
|
curlCommand: null,
|
||||||
|
message: 'Workflow undeployed successfully.',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deploy action - check if user has API keys
|
||||||
|
const keys = await db
|
||||||
|
.select({ id: apiKey.id })
|
||||||
|
.from(apiKey)
|
||||||
|
.where(
|
||||||
|
or(
|
||||||
|
eq(apiKey.userId, context.userId),
|
||||||
|
workspaceId ? eq(apiKey.workspaceId, workspaceId) : undefined
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
if (keys.length === 0) {
|
||||||
|
return DeployApiResult.parse({
|
||||||
|
success: false,
|
||||||
|
action: 'deploy',
|
||||||
|
isDeployed: false,
|
||||||
|
deployedAt: null,
|
||||||
|
endpoint: null,
|
||||||
|
curlCommand: null,
|
||||||
|
message:
|
||||||
|
'Cannot deploy without an API key. Please create an API key in settings first, then try deploying again.',
|
||||||
|
needsApiKey: true,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get current max version
|
||||||
|
const [maxVersion] = await db
|
||||||
|
.select({ version: workflowDeploymentVersion.version })
|
||||||
|
.from(workflowDeploymentVersion)
|
||||||
|
.where(eq(workflowDeploymentVersion.workflowId, workflowId))
|
||||||
|
.orderBy(desc(workflowDeploymentVersion.version))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const newVersion = (maxVersion?.version || 0) + 1
|
||||||
|
|
||||||
|
// Deactivate all existing versions
|
||||||
|
await db
|
||||||
|
.update(workflowDeploymentVersion)
|
||||||
|
.set({ isActive: false })
|
||||||
|
.where(eq(workflowDeploymentVersion.workflowId, workflowId))
|
||||||
|
|
||||||
|
// Create new deployment version
|
||||||
|
const deploymentId = crypto.randomUUID()
|
||||||
|
const now = new Date()
|
||||||
|
|
||||||
|
// Load workflow state from normalized tables
|
||||||
|
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
const workflowState = {
|
||||||
|
blocks: normalizedData?.blocks || {},
|
||||||
|
edges: normalizedData?.edges || [],
|
||||||
|
loops: normalizedData?.loops || {},
|
||||||
|
parallels: normalizedData?.parallels || {},
|
||||||
|
}
|
||||||
|
|
||||||
|
await db.insert(workflowDeploymentVersion).values({
|
||||||
|
id: deploymentId,
|
||||||
|
workflowId,
|
||||||
|
version: newVersion,
|
||||||
|
state: workflowState,
|
||||||
|
isActive: true,
|
||||||
|
createdAt: now,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Build API info
|
||||||
|
const appUrl = process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'
|
||||||
|
const apiEndpoint = `${appUrl}/api/workflows/${workflowId}/execute`
|
||||||
|
const curlCommand = `curl -X POST -H "X-API-Key: $SIM_API_KEY" -H "Content-Type: application/json" ${apiEndpoint}`
|
||||||
|
|
||||||
|
logger.info('Workflow deployed as API', { workflowId, version: newVersion })
|
||||||
|
|
||||||
|
return DeployApiResult.parse({
|
||||||
|
success: true,
|
||||||
|
action: 'deploy',
|
||||||
|
isDeployed: true,
|
||||||
|
deployedAt: now.toISOString(),
|
||||||
|
endpoint: apiEndpoint,
|
||||||
|
curlCommand,
|
||||||
|
message: 'Workflow deployed successfully as API. You can now call it via REST.',
|
||||||
|
})
|
||||||
|
},
|
||||||
|
}
|
||||||
apps/sim/lib/copilot/tools/server/workflow/deploy-chat.ts (new file, 274 lines)
@@ -0,0 +1,274 @@
import { db } from '@sim/db'
import { chat, workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'

const logger = createLogger('DeployChatServerTool')

const OutputConfigSchema = z.object({
  blockId: z.string(),
  path: z.string(),
})

export const DeployChatInput = z.object({
  action: z.enum(['deploy', 'undeploy']).default('deploy'),
  workflowId: z.string().min(1),
  identifier: z.string().optional(),
  title: z.string().optional(),
  description: z.string().optional(),
  authType: z.enum(['public', 'password', 'email', 'sso']).optional().default('public'),
  password: z.string().optional(),
  allowedEmails: z.array(z.string()).optional(),
  welcomeMessage: z.string().optional(),
  outputConfigs: z.array(OutputConfigSchema).optional(),
})

export const DeployChatResult = z.object({
  success: z.boolean(),
  action: z.string(),
  isDeployed: z.boolean(),
  chatId: z.string().nullable(),
  chatUrl: z.string().nullable(),
  identifier: z.string().nullable(),
  title: z.string().nullable(),
  authType: z.string().nullable(),
  message: z.string(),
  error: z.string().optional(),
  errorCode: z.string().optional(),
})

export type DeployChatInputType = z.infer<typeof DeployChatInput>
export type DeployChatResultType = z.infer<typeof DeployChatResult>

function generateIdentifier(workflowName: string): string {
  return workflowName
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-|-$/g, '')
    .substring(0, 50)
}

export const deployChatServerTool: BaseServerTool<DeployChatInputType, DeployChatResultType> = {
  name: 'deploy_chat',
  async execute(args: unknown, context?: { userId: string }) {
    const parsed = DeployChatInput.parse(args)
    const { action, workflowId, authType = 'public' } = parsed

    if (!context?.userId) {
      throw new Error('User authentication required')
    }

    logger.debug('Deploy Chat', { action, workflowId })

    // Get workflow info
    const [wf] = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    const appUrl = process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'

    // Check for existing deployment
    const [existingChat] = await db
      .select()
      .from(chat)
      .where(and(eq(chat.workflowId, workflowId), eq(chat.isActive, true)))
      .limit(1)

    if (action === 'undeploy') {
      if (!existingChat) {
        return DeployChatResult.parse({
          success: false,
          action: 'undeploy',
          isDeployed: false,
          chatId: null,
          chatUrl: null,
          identifier: null,
          title: null,
          authType: null,
          message: 'No active chat deployment found for this workflow',
          error: 'No active chat deployment found',
          errorCode: 'VALIDATION_ERROR',
        })
      }

      // Deactivate the chat deployment
      await db.update(chat).set({ isActive: false }).where(eq(chat.id, existingChat.id))

      logger.info('Chat undeployed', { workflowId, chatId: existingChat.id })

      return DeployChatResult.parse({
        success: true,
        action: 'undeploy',
        isDeployed: false,
        chatId: null,
        chatUrl: null,
        identifier: null,
        title: null,
        authType: null,
        message: 'Chat deployment removed successfully.',
      })
    }

    // Deploy action
    const identifier =
      parsed.identifier || existingChat?.identifier || generateIdentifier(wf.name || 'chat')
    const title = parsed.title || existingChat?.title || wf.name || 'Chat'
    const description = parsed.description ?? existingChat?.description ?? ''
    const welcomeMessage =
      parsed.welcomeMessage ||
      (existingChat?.customizations as any)?.welcomeMessage ||
      'Hi there! How can I help you today?'
    const primaryColor =
      (existingChat?.customizations as any)?.primaryColor || 'var(--brand-primary-hover-hex)'
    const existingAllowedEmails = Array.isArray(existingChat?.allowedEmails)
      ? existingChat.allowedEmails
      : []
    const allowedEmails = parsed.allowedEmails || existingAllowedEmails
    const outputConfigs = parsed.outputConfigs || existingChat?.outputConfigs || []

    // Validate requirements
    if (authType === 'password' && !parsed.password && !existingChat?.password) {
      throw new Error('Password is required when using password protection')
    }

    if ((authType === 'email' || authType === 'sso') && allowedEmails.length === 0) {
      throw new Error(`At least one email or domain is required when using ${authType} access`)
    }

    // Check if identifier is already in use by another workflow
    if (!existingChat) {
      const [existingIdentifier] = await db
        .select({ id: chat.id })
        .from(chat)
        .where(and(eq(chat.identifier, identifier), eq(chat.isActive, true)))
        .limit(1)

      if (existingIdentifier) {
        return DeployChatResult.parse({
          success: false,
          action: 'deploy',
          isDeployed: false,
          chatId: null,
          chatUrl: null,
          identifier,
          title: null,
          authType: null,
          message: `The identifier "${identifier}" is already in use. Please choose a different one.`,
          error: `Identifier "${identifier}" is already taken`,
          errorCode: 'IDENTIFIER_TAKEN',
        })
      }
    }

    // Ensure workflow is deployed as API first
    const [deployment] = await db
      .select({ id: workflowDeploymentVersion.id })
      .from(workflowDeploymentVersion)
      .where(
        and(
          eq(workflowDeploymentVersion.workflowId, workflowId),
          eq(workflowDeploymentVersion.isActive, true)
        )
      )
      .limit(1)

    if (!deployment) {
      // Auto-deploy the API
      const [maxVersion] = await db
        .select({ version: workflowDeploymentVersion.version })
        .from(workflowDeploymentVersion)
        .where(eq(workflowDeploymentVersion.workflowId, workflowId))
        .orderBy(desc(workflowDeploymentVersion.version))
        .limit(1)

      const newVersion = (maxVersion?.version || 0) + 1
      const deploymentId = crypto.randomUUID()
      const now = new Date()

      // Load workflow state from normalized tables
      const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
      const workflowState = {
        blocks: normalizedData?.blocks || {},
        edges: normalizedData?.edges || [],
        loops: normalizedData?.loops || {},
        parallels: normalizedData?.parallels || {},
      }

      await db.insert(workflowDeploymentVersion).values({
        id: deploymentId,
        workflowId,
        version: newVersion,
        state: workflowState,
        isActive: true,
        createdAt: now,
      })

      logger.info('Auto-deployed API for chat', { workflowId, version: newVersion })
    }

    const now = new Date()
    let chatId: string

    if (existingChat) {
      // Update existing deployment
      await db
        .update(chat)
        .set({
          identifier: identifier.trim(),
          title: title.trim(),
          description: description.trim(),
          authType,
          password: authType === 'password' ? parsed.password || existingChat.password : null,
          allowedEmails: authType === 'email' || authType === 'sso' ? allowedEmails : [],
          customizations: { primaryColor, welcomeMessage: welcomeMessage.trim() },
          outputConfigs,
          updatedAt: now,
        })
        .where(eq(chat.id, existingChat.id))

      chatId = existingChat.id
      logger.info('Updated chat deployment', { chatId })
    } else {
      // Create new deployment
      chatId = crypto.randomUUID()

      await db.insert(chat).values({
        id: chatId,
        workflowId,
        userId: context.userId,
        identifier: identifier.trim(),
        title: title.trim(),
        description: description.trim(),
        authType,
        password: authType === 'password' ? parsed.password : null,
        allowedEmails: authType === 'email' || authType === 'sso' ? allowedEmails : [],
        customizations: { primaryColor, welcomeMessage: welcomeMessage.trim() },
        outputConfigs,
        isActive: true,
        createdAt: now,
        updatedAt: now,
      })

      logger.info('Created chat deployment', { chatId })
    }

    const chatUrl = `${appUrl}/chat/${identifier}`

    return DeployChatResult.parse({
      success: true,
      action: 'deploy',
      isDeployed: true,
      chatId,
      chatUrl,
      identifier,
      title,
      authType,
      message: `Chat deployed successfully! Available at: ${chatUrl}`,
    })
  },
}
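A rough usage sketch of the tool above; the IDs are placeholders, and the context object mirrors the execute signature shown in the diff:

    // Hypothetical invocation from the copilot server executor (illustrative only).
    const result = await deployChatServerTool.execute(
      { action: 'deploy', workflowId: 'wf_123', title: 'Support Bot', authType: 'public' },
      { userId: 'user_456' }
    )
    // result.chatUrl is expected to look like `${appUrl}/chat/support-bot`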
apps/sim/lib/copilot/tools/server/workflow/deploy-mcp.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
import { db } from '@sim/db'
import {
  workflow,
  workflowDeploymentVersion,
  workflowMcpServer,
  workflowMcpTool,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'

const logger = createLogger('DeployMcpServerTool')

const ParameterDescriptionSchema = z.object({
  name: z.string(),
  description: z.string(),
})

export const DeployMcpInput = z.object({
  serverId: z.string().min(1),
  workflowId: z.string().min(1),
  toolName: z.string().optional(),
  toolDescription: z.string().optional(),
  parameterDescriptions: z.array(ParameterDescriptionSchema).optional(),
})

export const DeployMcpResult = z.object({
  success: z.boolean(),
  toolId: z.string().nullable(),
  toolName: z.string().nullable(),
  toolDescription: z.string().nullable(),
  serverId: z.string().nullable(),
  updated: z.boolean().optional(),
  message: z.string(),
  error: z.string().optional(),
})

export type DeployMcpInputType = z.infer<typeof DeployMcpInput>
export type DeployMcpResultType = z.infer<typeof DeployMcpResult>

export const deployMcpServerTool: BaseServerTool<DeployMcpInputType, DeployMcpResultType> = {
  name: 'deploy_mcp',
  async execute(args: unknown, context?: { userId: string }) {
    const parsed = DeployMcpInput.parse(args)
    const { serverId, workflowId, toolName, toolDescription, parameterDescriptions } = parsed

    if (!context?.userId) {
      throw new Error('User authentication required')
    }

    logger.debug('Deploy MCP', { serverId, workflowId })

    // Get workflow info
    const [wf] = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    // Check if server exists
    const [server] = await db
      .select()
      .from(workflowMcpServer)
      .where(eq(workflowMcpServer.id, serverId))
      .limit(1)

    if (!server) {
      throw new Error(
        'MCP server not found. Use list_workspace_mcp_servers to see available servers.'
      )
    }

    // Check if workflow is deployed as API
    const [deployment] = await db
      .select({ id: workflowDeploymentVersion.id })
      .from(workflowDeploymentVersion)
      .where(
        and(
          eq(workflowDeploymentVersion.workflowId, workflowId),
          eq(workflowDeploymentVersion.isActive, true)
        )
      )
      .limit(1)

    if (!deployment) {
      throw new Error(
        'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.'
      )
    }

    // Build parameter schema if provided
    let parameterSchema: Record<string, unknown> | null = null
    if (parameterDescriptions && parameterDescriptions.length > 0) {
      const properties: Record<string, { description: string }> = {}
      for (const param of parameterDescriptions) {
        properties[param.name] = { description: param.description }
      }
      parameterSchema = { properties }
    }

    const finalToolName = toolName?.trim() || wf.name || 'workflow'
    const finalToolDescription = toolDescription?.trim() || null

    // Check if tool already exists for this workflow on this server
    const [existingTool] = await db
      .select()
      .from(workflowMcpTool)
      .where(
        and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.workflowId, workflowId))
      )
      .limit(1)

    const now = new Date()

    if (existingTool) {
      // Update existing tool
      await db
        .update(workflowMcpTool)
        .set({
          toolName: finalToolName,
          toolDescription: finalToolDescription,
          parameterSchema,
          updatedAt: now,
        })
        .where(eq(workflowMcpTool.id, existingTool.id))

      logger.info('Updated MCP tool', { toolId: existingTool.id, toolName: finalToolName })

      return DeployMcpResult.parse({
        success: true,
        toolId: existingTool.id,
        toolName: finalToolName,
        toolDescription: finalToolDescription,
        serverId,
        updated: true,
        message: `Workflow MCP tool updated to "${finalToolName}".`,
      })
    }

    // Create new tool
    const toolId = crypto.randomUUID()

    await db.insert(workflowMcpTool).values({
      id: toolId,
      serverId,
      workflowId,
      toolName: finalToolName,
      toolDescription: finalToolDescription,
      parameterSchema,
      createdAt: now,
      updatedAt: now,
    })

    logger.info('Created MCP tool', { toolId, toolName: finalToolName })

    return DeployMcpResult.parse({
      success: true,
      toolId,
      toolName: finalToolName,
      toolDescription: finalToolDescription,
      serverId,
      message: `Workflow deployed as MCP tool "${finalToolName}" to server.`,
    })
  },
}
@@ -8,7 +8,10 @@ import { validateSelectorIds } from '@/lib/copilot/validation/selector-validator
 import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
 import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
-import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
+import {
+  loadWorkflowFromNormalizedTables,
+  saveWorkflowToNormalizedTables,
+} from '@/lib/workflows/persistence/utils'
 import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
 import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
 import { buildCanonicalIndex, isCanonicalPair } from '@/lib/workflows/subblocks/visibility'
@@ -2626,13 +2629,22 @@ async function getCurrentWorkflowStateFromDb(
 export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
   name: 'edit_workflow',
-  async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<any> {
+  async execute(
+    params: EditWorkflowParams,
+    context?: { userId: string; workflowId?: string }
+  ): Promise<any> {
     const logger = createLogger('EditWorkflowServerTool')
-    const { operations, workflowId, currentUserWorkflow } = params
+    const { operations, currentUserWorkflow } = params
+    // Use workflowId from params if provided, otherwise fall back to context
+    const workflowId = params.workflowId || context?.workflowId
     if (!Array.isArray(operations) || operations.length === 0) {
       throw new Error('operations are required and must be an array')
     }
-    if (!workflowId) throw new Error('workflowId is required')
+    if (!workflowId) {
+      throw new Error(
+        'No workflow specified. Please provide a workflowId or ensure you have an active workflow open.'
+      )
+    }
+
     logger.info('Executing edit_workflow', {
       operationCount: operations.length,
@@ -2737,10 +2749,66 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
       logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
     }
 
-    logger.info('edit_workflow successfully applied operations', {
+    // Prepare the final workflow state for persistence
+    const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState
+
+    // ─────────────────────────────────────────────────────────────────────────
+    // PERSIST THE CHANGES TO THE DATABASE
+    // This is critical for headless mode and ensures changes are saved
+    // ─────────────────────────────────────────────────────────────────────────
+    const workflowStateForPersistence = {
+      blocks: finalWorkflowState.blocks,
+      edges: finalWorkflowState.edges,
+      loops: finalWorkflowState.loops || {},
+      parallels: finalWorkflowState.parallels || {},
+      lastSaved: Date.now(),
+    }
+
+    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowStateForPersistence)
+
+    if (!saveResult.success) {
+      logger.error('Failed to persist workflow changes to database', {
+        workflowId,
+        error: saveResult.error,
+      })
+      throw new Error(`Failed to save workflow: ${saveResult.error}`)
+    }
+
+    // Update workflow's lastSynced timestamp
+    await db
+      .update(workflowTable)
+      .set({
+        lastSynced: new Date(),
+        updatedAt: new Date(),
+      })
+      .where(eq(workflowTable.id, workflowId))
+
+    // Notify socket server so connected clients can refresh
+    // This uses the copilot-specific endpoint to trigger UI refresh
+    try {
+      const socketUrl = process.env.SOCKET_SERVER_URL || 'http://localhost:3002'
+      const operationsSummary = operations
+        .map((op: any) => `${op.operation_type} ${op.block_id || 'block'}`)
+        .slice(0, 3)
+        .join(', ')
+      const description = `Applied ${operations.length} operation(s): ${operationsSummary}${operations.length > 3 ? '...' : ''}`
+
+      await fetch(`${socketUrl}/api/copilot-workflow-edit`, {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({ workflowId, description }),
+      }).catch((err) => {
+        logger.warn('Failed to notify socket server about copilot edit', { error: err.message })
+      })
+    } catch (notifyError) {
+      // Non-fatal - log and continue
+      logger.warn('Error notifying socket server', { error: notifyError })
+    }
+
+    logger.info('edit_workflow successfully applied and persisted operations', {
       operationCount: operations.length,
-      blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
+      blocksCount: Object.keys(finalWorkflowState.blocks).length,
-      edgesCount: modifiedWorkflowState.edges.length,
+      edgesCount: finalWorkflowState.edges.length,
       inputValidationErrors: validationErrors.length,
       skippedItemsCount: skippedItems.length,
       schemaValidationErrors: validation.errors.length,
@@ -2760,7 +2828,7 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
     // Return the modified workflow state for the client to convert to YAML if needed
     return {
       success: true,
-      workflowState: validation.sanitizedState || modifiedWorkflowState,
+      workflowState: finalWorkflowState,
       // Include input validation errors so the LLM can see what was rejected
       ...(inputErrors && {
         inputValidationErrors: inputErrors,
apps/sim/lib/copilot/tools/server/workflow/get-block-outputs.ts (new file, 181 lines)
@@ -0,0 +1,181 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { z } from 'zod'
import { getBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { normalizeName } from '@/executor/constants'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import type { BaseServerTool } from '../base-tool'

const logger = createLogger('GetBlockOutputsServerTool')

export const GetBlockOutputsInput = z.object({
  workflowId: z.string().min(1),
  blockIds: z.array(z.string()).optional(),
})

const BlockOutputSchema = z.object({
  blockId: z.string(),
  blockName: z.string(),
  blockType: z.string(),
  triggerMode: z.boolean().optional(),
  outputs: z.array(z.string()),
  insideSubflowOutputs: z.array(z.string()).optional(),
  outsideSubflowOutputs: z.array(z.string()).optional(),
})

const VariableOutputSchema = z.object({
  id: z.string(),
  name: z.string(),
  type: z.string(),
  tag: z.string(),
})

export const GetBlockOutputsResult = z.object({
  blocks: z.array(BlockOutputSchema),
  variables: z.array(VariableOutputSchema).optional(),
})

export type GetBlockOutputsInputType = z.infer<typeof GetBlockOutputsInput>
export type GetBlockOutputsResultType = z.infer<typeof GetBlockOutputsResult>

interface Variable {
  id: string
  name: string
  type: string
}

function formatOutputsWithPrefix(paths: string[], blockName: string): string[] {
  const normalizedName = normalizeName(blockName)
  return paths.map((path) => `${normalizedName}.${path}`)
}

function getSubflowInsidePaths(
  blockType: 'loop' | 'parallel',
  blockId: string,
  loops: Record<string, Loop>,
  parallels: Record<string, Parallel>
): string[] {
  const paths = ['index']
  if (blockType === 'loop') {
    const loopType = loops[blockId]?.loopType || 'for'
    if (loopType === 'forEach') {
      paths.push('currentItem', 'items')
    }
  } else {
    const parallelType = parallels[blockId]?.parallelType || 'count'
    if (parallelType === 'collection') {
      paths.push('currentItem', 'items')
    }
  }
  return paths
}

export const getBlockOutputsServerTool: BaseServerTool<
  GetBlockOutputsInputType,
  GetBlockOutputsResultType
> = {
  name: 'get_block_outputs',
  async execute(args: unknown, context?: { userId: string }) {
    const parsed = GetBlockOutputsInput.parse(args)
    const { workflowId, blockIds } = parsed

    if (!context?.userId) {
      throw new Error('User authentication required')
    }

    logger.debug('Getting block outputs', { workflowId, blockIds })

    // Load workflow from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

    if (!normalizedData?.blocks) {
      throw new Error('Workflow state is empty or invalid')
    }

    const blocks = normalizedData.blocks
    const loops = normalizedData.loops || {}
    const parallels = normalizedData.parallels || {}

    const targetBlockIds = blockIds && blockIds.length > 0 ? blockIds : Object.keys(blocks)

    const blockOutputs: GetBlockOutputsResultType['blocks'] = []

    for (const blockId of targetBlockIds) {
      const block = blocks[blockId]
      if (!block?.type) continue

      const blockName = block.name || block.type

      const blockOutput: GetBlockOutputsResultType['blocks'][0] = {
        blockId,
        blockName,
        blockType: block.type,
        outputs: [],
      }

      // Include triggerMode if the block is in trigger mode
      if (block.triggerMode) {
        blockOutput.triggerMode = true
      }

      if (block.type === 'loop' || block.type === 'parallel') {
        const insidePaths = getSubflowInsidePaths(block.type, blockId, loops, parallels)
        blockOutput.insideSubflowOutputs = formatOutputsWithPrefix(insidePaths, blockName)
        blockOutput.outsideSubflowOutputs = formatOutputsWithPrefix(['results'], blockName)
      } else {
        // Compute output paths using the block's subBlocks
        const outputPaths = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
        blockOutput.outputs = formatOutputsWithPrefix(outputPaths, blockName)
      }

      blockOutputs.push(blockOutput)
    }

    // Get workflow variables if no specific blockIds requested
    let variables: GetBlockOutputsResultType['variables'] | undefined
    const includeVariables = !blockIds || blockIds.length === 0

    if (includeVariables) {
      // Get variables from workflow record
      const [wf] = await db
        .select({ variables: workflow.variables })
        .from(workflow)
        .where(eq(workflow.id, workflowId))
        .limit(1)

      const workflowVariables = wf?.variables as Record<string, Variable> | null

      if (workflowVariables && typeof workflowVariables === 'object') {
        variables = Object.values(workflowVariables)
          .filter(
            (v): v is Variable =>
              typeof v === 'object' &&
              v !== null &&
              'name' in v &&
              typeof v.name === 'string' &&
              v.name.trim() !== ''
          )
          .map((variable) => ({
            id: variable.id,
            name: variable.name,
            type: variable.type || 'string',
            tag: `variable.${normalizeName(variable.name)}`,
          }))
      }
    }

    logger.info('Retrieved block outputs', {
      workflowId,
      blockCount: blockOutputs.length,
      variableCount: variables?.length ?? 0,
    })

    return GetBlockOutputsResult.parse({
      blocks: blockOutputs,
      ...(variables && { variables }),
    })
  },
}
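For orientation, a hypothetical result for a workflow with one agent block and one variable; every value below is made up and the exact output paths depend on the block definition and normalizeName:

    // Illustrative shape only, consistent with GetBlockOutputsResult above.
    const exampleResult: GetBlockOutputsResultType = {
      blocks: [
        { blockId: 'blk_1', blockName: 'Agent 1', blockType: 'agent', outputs: ['agent1.content'] },
      ],
      variables: [{ id: 'var_1', name: 'apiBase', type: 'string', tag: 'variable.apibase' }],
    }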
@@ -0,0 +1,310 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { z } from 'zod'
import { normalizeName } from '@/executor/constants'
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
import { getBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import type { BaseServerTool } from '../base-tool'

const logger = createLogger('GetBlockUpstreamReferencesServerTool')

export const GetBlockUpstreamReferencesInput = z.object({
  workflowId: z.string().min(1),
  blockIds: z.array(z.string()).min(1),
})

interface Variable {
  id: string
  name: string
  type?: string
}

interface BlockOutput {
  blockId: string
  blockName: string
  blockType: string
  outputs: string[]
  triggerMode?: boolean
  accessContext?: 'inside' | 'outside'
}

interface UpstreamResult {
  blockId: string
  blockName: string
  accessibleBlocks: BlockOutput[]
  variables: Array<{ id: string; name: string; type: string; tag: string }>
  insideSubflows?: Array<{ blockId: string; blockName: string; blockType: string }>
}

const GetBlockUpstreamReferencesResult = z.object({
  results: z.array(
    z.object({
      blockId: z.string(),
      blockName: z.string(),
      accessibleBlocks: z.array(
        z.object({
          blockId: z.string(),
          blockName: z.string(),
          blockType: z.string(),
          outputs: z.array(z.string()),
          triggerMode: z.boolean().optional(),
          accessContext: z.enum(['inside', 'outside']).optional(),
        })
      ),
      variables: z.array(
        z.object({
          id: z.string(),
          name: z.string(),
          type: z.string(),
          tag: z.string(),
        })
      ),
      insideSubflows: z
        .array(
          z.object({
            blockId: z.string(),
            blockName: z.string(),
            blockType: z.string(),
          })
        )
        .optional(),
    })
  ),
})

type GetBlockUpstreamReferencesResultType = z.infer<typeof GetBlockUpstreamReferencesResult>

/**
 * Format output paths with block name prefix
 */
function formatOutputsWithPrefix(outputPaths: string[], blockName: string): string[] {
  const normalized = normalizeName(blockName)
  return outputPaths.map((path) => `${normalized}.${path}`)
}

/**
 * Get outputs for subflow from inside (loop item, parallel item, etc.)
 */
function getSubflowInsidePaths(
  blockType: string,
  blockId: string,
  loops: Record<string, Loop>,
  parallels: Record<string, Parallel>
): string[] {
  if (blockType === 'loop') {
    const loop = loops[blockId]
    if (loop?.loopType === 'forEach') {
      return ['item', 'index']
    }
    return ['index']
  }
  if (blockType === 'parallel') {
    return ['item', 'index']
  }
  return []
}

export const getBlockUpstreamReferencesServerTool: BaseServerTool<
  typeof GetBlockUpstreamReferencesInput,
  GetBlockUpstreamReferencesResultType
> = {
  name: 'get_block_upstream_references',

  async execute(args: unknown, context?: { userId: string }) {
    const parsed = GetBlockUpstreamReferencesInput.parse(args)
    const { workflowId, blockIds } = parsed

    logger.info('Getting block upstream references', {
      workflowId,
      blockIds,
    })

    // Load workflow from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

    if (!normalizedData?.blocks) {
      throw new Error('Workflow state is empty or invalid')
    }

    const blocks = normalizedData.blocks
    const edges = normalizedData.edges || []
    const loops = (normalizedData.loops || {}) as Record<string, Loop>
    const parallels = (normalizedData.parallels || {}) as Record<string, Parallel>

    // Get workflow variables
    const [wf] = await db
      .select({ variables: workflow.variables })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

    const workflowVariables = wf?.variables as Record<string, Variable> | null
    let variables: Array<{ id: string; name: string; type: string; tag: string }> = []

    if (workflowVariables && typeof workflowVariables === 'object') {
      variables = Object.values(workflowVariables)
        .filter(
          (v): v is Variable =>
            typeof v === 'object' &&
            v !== null &&
            'name' in v &&
            typeof v.name === 'string' &&
            v.name.trim() !== ''
        )
        .map((variable) => ({
          id: variable.id,
          name: variable.name,
          type: variable.type || 'string',
          tag: `variable.${normalizeName(variable.name)}`,
        }))
    }

    // Build graph edges for path calculation
    const graphEdges = edges.map((edge: { source: string; target: string }) => ({
      source: edge.source,
      target: edge.target,
    }))

    const results: UpstreamResult[] = []

    for (const blockId of blockIds) {
      const targetBlock = blocks[blockId]
      if (!targetBlock) {
        logger.warn(`Block ${blockId} not found`)
        continue
      }

      const insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }> = []
      const containingLoopIds = new Set<string>()
      const containingParallelIds = new Set<string>()

      // Find containing loops
      Object.values(loops).forEach((loop) => {
        if (loop?.nodes?.includes(blockId)) {
          containingLoopIds.add(loop.id)
          const loopBlock = blocks[loop.id]
          if (loopBlock) {
            insideSubflows.push({
              blockId: loop.id,
              blockName: loopBlock.name || loopBlock.type,
              blockType: 'loop',
            })
          }
        }
      })

      // Find containing parallels
      Object.values(parallels).forEach((parallel) => {
        if (parallel?.nodes?.includes(blockId)) {
          containingParallelIds.add(parallel.id)
          const parallelBlock = blocks[parallel.id]
          if (parallelBlock) {
            insideSubflows.push({
              blockId: parallel.id,
              blockName: parallelBlock.name || parallelBlock.type,
              blockType: 'parallel',
            })
          }
        }
      })

      // Find all ancestor blocks using path calculator
      const ancestorIds = BlockPathCalculator.findAllPathNodes(graphEdges, blockId)
      const accessibleIds = new Set<string>(ancestorIds)
      accessibleIds.add(blockId)

      // Include starter block if it's an ancestor
      const starterBlock = Object.values(blocks).find((b: any) =>
        isInputDefinitionTrigger(b.type)
      )
      if (starterBlock && ancestorIds.includes((starterBlock as any).id)) {
        accessibleIds.add((starterBlock as any).id)
      }

      // Add all nodes in containing loops/parallels
      containingLoopIds.forEach((loopId) => {
        accessibleIds.add(loopId)
        loops[loopId]?.nodes?.forEach((nodeId) => accessibleIds.add(nodeId))
      })

      containingParallelIds.forEach((parallelId) => {
        accessibleIds.add(parallelId)
        parallels[parallelId]?.nodes?.forEach((nodeId) => accessibleIds.add(nodeId))
      })

      const accessibleBlocks: BlockOutput[] = []

      for (const accessibleBlockId of accessibleIds) {
        const block = blocks[accessibleBlockId] as any
        if (!block?.type) continue

        // Skip self-reference unless it's a special block type
        const canSelfReference = block.type === 'approval' || block.type === 'human_in_the_loop'
        if (accessibleBlockId === blockId && !canSelfReference) continue

        const blockName = block.name || block.type
        let accessContext: 'inside' | 'outside' | undefined
        let outputPaths: string[]

        if (block.type === 'loop' || block.type === 'parallel') {
          const isInside =
            (block.type === 'loop' && containingLoopIds.has(accessibleBlockId)) ||
            (block.type === 'parallel' && containingParallelIds.has(accessibleBlockId))

          accessContext = isInside ? 'inside' : 'outside'
          outputPaths = isInside
            ? getSubflowInsidePaths(block.type, accessibleBlockId, loops, parallels)
            : ['results']
        } else {
          outputPaths = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
        }

        const formattedOutputs = formatOutputsWithPrefix(outputPaths, blockName)

        const entry: BlockOutput = {
          blockId: accessibleBlockId,
          blockName,
          blockType: block.type,
          outputs: formattedOutputs,
        }

        if (block.triggerMode) {
          entry.triggerMode = true
        }

        if (accessContext) {
          entry.accessContext = accessContext
        }

        accessibleBlocks.push(entry)
      }

      const resultEntry: UpstreamResult = {
        blockId,
        blockName: targetBlock.name || targetBlock.type,
        accessibleBlocks,
        variables,
      }

      if (insideSubflows.length > 0) {
        resultEntry.insideSubflows = insideSubflows
      }

      results.push(resultEntry)
    }

    const result = GetBlockUpstreamReferencesResult.parse({ results })

    logger.info('Retrieved upstream references', {
      blockIds,
      resultCount: results.length,
    })

    return result
  },
}
@@ -0,0 +1,90 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'

const logger = createLogger('GetUserWorkflowServerTool')

// workflowId is optional - if not provided, we use the one from execution context
export const GetUserWorkflowInput = z.object({
  workflowId: z.string().optional(),
})

export const GetUserWorkflowResult = z.object({
  userWorkflow: z.string(),
  workflowId: z.string(),
  workflowName: z.string(),
})

export type GetUserWorkflowInputType = z.infer<typeof GetUserWorkflowInput>
export type GetUserWorkflowResultType = z.infer<typeof GetUserWorkflowResult>

export const getUserWorkflowServerTool: BaseServerTool<
  GetUserWorkflowInputType,
  GetUserWorkflowResultType
> = {
  name: 'get_user_workflow',
  async execute(args: unknown, context?: { userId: string; workflowId?: string }) {
    const parsed = GetUserWorkflowInput.parse(args)
    // Use workflowId from args if provided, otherwise fall back to context
    const workflowId = parsed.workflowId || context?.workflowId

    if (!context?.userId) {
      throw new Error('User authentication required')
    }

    if (!workflowId) {
      throw new Error(
        'No workflow specified. Please provide a workflowId or ensure you have an active workflow open.'
      )
    }

    logger.debug('Getting user workflow', { workflowId, fromContext: !parsed.workflowId })

    // Get workflow metadata
    const [wf] = await db
      .select({ id: workflow.id, name: workflow.name, userId: workflow.userId })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    // Load workflow from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

    if (!normalizedData?.blocks || Object.keys(normalizedData.blocks).length === 0) {
      throw new Error('Workflow state is empty or invalid')
    }

    // Build workflow state
    const workflowState = {
      blocks: normalizedData.blocks,
      edges: normalizedData.edges || [],
      loops: normalizedData.loops || {},
      parallels: normalizedData.parallels || {},
    }

    // Sanitize for copilot (remove UI-specific data)
    const sanitizedState = sanitizeForCopilot(workflowState as any)
    const userWorkflow = JSON.stringify(sanitizedState, null, 2)

    logger.info('Retrieved user workflow', {
      workflowId,
      workflowName: wf.name,
      blockCount: Object.keys(normalizedData.blocks).length,
    })

    return GetUserWorkflowResult.parse({
      userWorkflow,
      workflowId,
      workflowName: wf.name || 'Untitled',
    })
  },
}
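A quick sketch of the context fallback this tool enables in headless mode; the IDs are placeholders:

    // Illustrative only: no workflowId in args, so the tool falls back to context.workflowId.
    const { userWorkflow } = await getUserWorkflowServerTool.execute(
      {},
      { userId: 'user_456', workflowId: 'wf_123' }
    )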
apps/sim/lib/copilot/tools/server/workflow/get-workflow-data.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
import { db } from '@sim/db'
import { customTools, mcpServers as mcpServersTable, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { z } from 'zod'
import { normalizeName } from '@/executor/constants'
import type { BaseServerTool } from '../base-tool'

const logger = createLogger('GetWorkflowDataServerTool')

export const GetWorkflowDataInput = z.object({
  workflowId: z.string().min(1),
  workspaceId: z.string().optional(),
  data_type: z.enum(['global_variables', 'custom_tools', 'mcp_tools', 'files']),
})

interface Variable {
  id: string
  name: string
  value?: unknown
  type?: string
}

export const getWorkflowDataServerTool: BaseServerTool<typeof GetWorkflowDataInput, unknown> = {
  name: 'get_workflow_data',

  async execute(args: unknown, context?: { userId: string }) {
    const parsed = GetWorkflowDataInput.parse(args)
    const { workflowId, data_type } = parsed

    logger.info('Getting workflow data', {
      workflowId,
      dataType: data_type,
    })

    // Get workspace ID from workflow
    const [wf] = await db
      .select({ workspaceId: workflow.workspaceId, variables: workflow.variables })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

    if (!wf?.workspaceId) {
      throw new Error('Workflow not found or has no workspace')
    }

    const workspaceId = wf.workspaceId

    switch (data_type) {
      case 'global_variables':
        return fetchGlobalVariables(wf.variables as Record<string, Variable> | null)
      case 'custom_tools':
        return await fetchCustomTools(workspaceId)
      case 'mcp_tools':
        return await fetchMcpTools(workspaceId)
      case 'files':
        // Files require workspace ID - we'd need to call an API or access storage
        // For now, return empty array as files are typically accessed via API
        return { files: [], message: 'File listing not yet implemented server-side' }
      default:
        throw new Error(`Unknown data type: ${data_type}`)
    }
  },
}

function fetchGlobalVariables(workflowVariables: Record<string, Variable> | null) {
  const variables: Array<{ id: string; name: string; value: unknown; tag: string }> = []

  if (workflowVariables && typeof workflowVariables === 'object') {
    for (const variable of Object.values(workflowVariables)) {
      if (
        typeof variable === 'object' &&
        variable !== null &&
        'name' in variable &&
        typeof variable.name === 'string' &&
        variable.name.trim() !== ''
      ) {
        variables.push({
          id: variable.id,
          name: variable.name,
          value: variable.value,
          tag: `variable.${normalizeName(variable.name)}`,
        })
      }
    }
  }

  logger.info('Fetched workflow variables', { count: variables.length })
  return { variables }
}

async function fetchCustomTools(workspaceId: string) {
  const tools = await db
    .select({
      id: customTools.id,
      title: customTools.title,
      schema: customTools.schema,
    })
    .from(customTools)
    .where(eq(customTools.workspaceId, workspaceId))

  const formattedTools = tools.map((tool) => {
    const schema = tool.schema as {
      function?: { name?: string; description?: string; parameters?: unknown }
    } | null

    return {
      id: tool.id,
      title: tool.title,
      functionName: schema?.function?.name || '',
      description: schema?.function?.description || '',
      parameters: schema?.function?.parameters,
    }
  })

  logger.info('Fetched custom tools', { count: formattedTools.length })
  return { customTools: formattedTools }
}

async function fetchMcpTools(workspaceId: string) {
  const servers = await db
    .select({
      id: mcpServersTable.id,
      name: mcpServersTable.name,
      url: mcpServersTable.url,
      enabled: mcpServersTable.enabled,
    })
    .from(mcpServersTable)
    .where(and(eq(mcpServersTable.workspaceId, workspaceId), eq(mcpServersTable.enabled, true)))

  // For MCP tools, we return the server list
  // Full tool discovery would require connecting to each server
  const mcpServers = servers.map((server) => ({
    serverId: server.id,
    serverName: server.name,
    url: server.url,
    enabled: server.enabled,
  }))

  logger.info('Fetched MCP servers', { count: mcpServers.length })
  return { mcpServers, message: 'MCP servers listed. Full tool discovery requires server connection.' }
}
@@ -0,0 +1,86 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, ilike } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'

const logger = createLogger('GetWorkflowFromNameServerTool')

export const GetWorkflowFromNameInput = z.object({
  workflow_name: z.string().min(1),
})

export const GetWorkflowFromNameResult = z.object({
  userWorkflow: z.string(),
  workflowId: z.string(),
  workflowName: z.string(),
})

export type GetWorkflowFromNameInputType = z.infer<typeof GetWorkflowFromNameInput>
export type GetWorkflowFromNameResultType = z.infer<typeof GetWorkflowFromNameResult>

export const getWorkflowFromNameServerTool: BaseServerTool<
  GetWorkflowFromNameInputType,
  GetWorkflowFromNameResultType
> = {
  name: 'get_workflow_from_name',
  async execute(args: unknown, context?: { userId: string }) {
    const parsed = GetWorkflowFromNameInput.parse(args)
    const workflowName = parsed.workflow_name.trim()

    logger.debug('Executing get_workflow_from_name', {
      workflowName,
      userId: context?.userId,
    })

    if (!context?.userId) {
      throw new Error('User ID is required')
    }

    // Find workflow by name (case-insensitive)
    const workflows = await db
      .select({ id: workflow.id, name: workflow.name })
      .from(workflow)
      .where(and(eq(workflow.userId, context.userId), ilike(workflow.name, workflowName)))
      .limit(1)

    if (workflows.length === 0) {
      throw new Error(`Workflow not found: ${workflowName}`)
    }

    const wf = workflows[0]

    // Load workflow from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(wf.id)

    if (!normalizedData?.blocks || Object.keys(normalizedData.blocks).length === 0) {
      throw new Error('Workflow state is empty or invalid')
    }

    // Build workflow state from normalized data
    const workflowState = {
      blocks: normalizedData.blocks,
      edges: normalizedData.edges || [],
      loops: normalizedData.loops || {},
      parallels: normalizedData.parallels || {},
    }

    // Sanitize for copilot
    const sanitizedState = sanitizeForCopilot(workflowState as any)
    const userWorkflow = JSON.stringify(sanitizedState, null, 2)

    logger.info('Retrieved workflow by name', {
      workflowId: wf.id,
      workflowName: wf.name,
    })

    return GetWorkflowFromNameResult.parse({
      userWorkflow,
      workflowId: wf.id,
      workflowName: wf.name || workflowName,
    })
  },
}
@@ -0,0 +1,42 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import {
  type ListUserWorkflowsInputType,
  ListUserWorkflowsResult,
  type ListUserWorkflowsResultType,
} from '@/lib/copilot/tools/shared/schemas'

const logger = createLogger('ListUserWorkflowsServerTool')

export const listUserWorkflowsServerTool: BaseServerTool<
  ListUserWorkflowsInputType,
  ListUserWorkflowsResultType
> = {
  name: 'list_user_workflows',
  async execute(_args: unknown, context?: { userId: string }) {
    logger.debug('Executing list_user_workflows', { userId: context?.userId })

    if (!context?.userId) {
      throw new Error('User ID is required to list workflows')
    }

    const workflows = await db
      .select({ id: workflow.id, name: workflow.name })
      .from(workflow)
      .where(eq(workflow.userId, context.userId))

    const names = workflows
      .map((w) => w.name)
      .filter((n): n is string => typeof n === 'string' && n.length > 0)

    logger.info('Found workflows', { count: names.length, userId: context.userId })

    return ListUserWorkflowsResult.parse({
      workflow_names: names,
      count: names.length,
    })
  },
}
@@ -0,0 +1,83 @@
import { db } from '@sim/db'
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'

const logger = createLogger('ListWorkspaceMcpServersServerTool')

export const ListWorkspaceMcpServersInput = z.object({
  workspaceId: z.string(),
})

export const ListWorkspaceMcpServersResult = z.object({
  servers: z.array(
    z.object({
      id: z.string(),
      name: z.string(),
      description: z.string().nullable(),
      toolCount: z.number(),
      toolNames: z.array(z.string()),
    })
  ),
  count: z.number(),
  message: z.string(),
})

export type ListWorkspaceMcpServersInputType = z.infer<typeof ListWorkspaceMcpServersInput>
export type ListWorkspaceMcpServersResultType = z.infer<typeof ListWorkspaceMcpServersResult>

export const listWorkspaceMcpServersServerTool: BaseServerTool<
  ListWorkspaceMcpServersInputType,
  ListWorkspaceMcpServersResultType
> = {
  name: 'list_workspace_mcp_servers',
  async execute(args: unknown, _context?: { userId: string }) {
    const parsed = ListWorkspaceMcpServersInput.parse(args)
    const { workspaceId } = parsed

    logger.debug('Listing workspace MCP servers', { workspaceId })

    // Get all MCP servers in the workspace with their tool counts
    const servers = await db
      .select({
        id: workflowMcpServer.id,
        name: workflowMcpServer.name,
        description: workflowMcpServer.description,
      })
      .from(workflowMcpServer)
      .where(eq(workflowMcpServer.workspaceId, workspaceId))

    // Get tool names for each server
    const serversWithTools = await Promise.all(
      servers.map(async (server) => {
        const tools = await db
          .select({ toolName: workflowMcpTool.toolName })
          .from(workflowMcpTool)
          .where(eq(workflowMcpTool.serverId, server.id))

        return {
          id: server.id,
          name: server.name,
          description: server.description,
          toolCount: tools.length,
          toolNames: tools.map((t) => t.toolName),
        }
      })
    )

    const message =
      serversWithTools.length === 0
        ? 'No MCP servers found in this workspace. Use create_workspace_mcp_server to create one.'
        : `Found ${serversWithTools.length} MCP server(s) in the workspace.`

    logger.info('Listed MCP servers', { workspaceId, count: serversWithTools.length })

    return ListWorkspaceMcpServersResult.parse({
      servers: serversWithTools,
      count: serversWithTools.length,
      message,
    })
  },
}
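A minimal invocation sketch, assuming a valid workspace ID (both IDs below are placeholders); the tool validates its own input:

// (import of listWorkspaceMcpServersServerTool omitted; its module path is not shown in this excerpt)
const res = await listWorkspaceMcpServersServerTool.execute(
  { workspaceId: 'ws_123' }, // placeholder workspace ID
  { userId: 'user_abc' }     // placeholder; this tool does not read the user ID
)
console.log(res.message)
for (const server of res.servers) console.log(server.name, server.toolCount, server.toolNames)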
apps/sim/lib/copilot/tools/server/workflow/manage-custom-tool.ts (new file, 232 lines)
@@ -0,0 +1,232 @@
import { db } from '@sim/db'
import { customTools, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { z } from 'zod'
import type { BaseServerTool } from '../base-tool'

const logger = createLogger('ManageCustomToolServerTool')

const CustomToolSchemaZ = z.object({
  type: z.literal('function'),
  function: z.object({
    name: z.string(),
    description: z.string().optional(),
    parameters: z.object({
      type: z.string(),
      properties: z.record(z.any()),
      required: z.array(z.string()).optional(),
    }),
  }),
})

export const ManageCustomToolInput = z.object({
  workflowId: z.string().min(1),
  workspaceId: z.string().optional(),
  operation: z.enum(['add', 'edit', 'delete', 'list']),
  toolId: z.string().optional(),
  schema: CustomToolSchemaZ.optional(),
  code: z.string().optional(),
})

type ManageCustomToolResult = {
  success: boolean
  operation: string
  toolId?: string
  functionName?: string
  customTools?: Array<{
    id: string
    title: string
    functionName: string
    description: string
  }>
}

export const manageCustomToolServerTool: BaseServerTool<
  typeof ManageCustomToolInput,
  ManageCustomToolResult
> = {
  name: 'manage_custom_tool',

  async execute(args: unknown, context?: { userId: string }) {
    const parsed = ManageCustomToolInput.parse(args)
    const { workflowId, operation, toolId, schema, code } = parsed

    // Get workspace ID from workflow if not provided
    let workspaceId = parsed.workspaceId
    if (!workspaceId) {
      const [wf] = await db
        .select({ workspaceId: workflow.workspaceId })
        .from(workflow)
        .where(eq(workflow.id, workflowId))
        .limit(1)

      if (!wf?.workspaceId) {
        throw new Error('Workflow not found or has no workspace')
      }
      workspaceId = wf.workspaceId
    }

    logger.info('Managing custom tool', {
      operation,
      toolId,
      functionName: schema?.function?.name,
      workspaceId,
    })

    switch (operation) {
      case 'add':
        return await addCustomTool(workspaceId, schema, code, context?.userId)
      case 'edit':
        return await editCustomTool(workspaceId, toolId, schema, code)
      case 'delete':
        return await deleteCustomTool(workspaceId, toolId)
      case 'list':
        return await listCustomTools(workspaceId)
      default:
        throw new Error(`Unknown operation: ${operation}`)
    }
  },
}

async function addCustomTool(
  workspaceId: string,
  schema: z.infer<typeof CustomToolSchemaZ> | undefined,
  code: string | undefined,
  userId: string | undefined
): Promise<ManageCustomToolResult> {
  if (!schema) {
    throw new Error('Schema is required for adding a custom tool')
  }
  if (!code) {
    throw new Error('Code is required for adding a custom tool')
  }
  if (!userId) {
    throw new Error('User ID is required for adding a custom tool')
  }

  const functionName = schema.function.name

  const [created] = await db
    .insert(customTools)
    .values({
      id: nanoid(),
      workspaceId,
      userId,
      title: functionName,
      schema: schema as any,
      code,
    })
    .returning({ id: customTools.id })

  logger.info(`Created custom tool: ${functionName}`, { toolId: created.id })

  return {
    success: true,
    operation: 'add',
    toolId: created.id,
    functionName,
  }
}

async function editCustomTool(
  workspaceId: string,
  toolId: string | undefined,
  schema: z.infer<typeof CustomToolSchemaZ> | undefined,
  code: string | undefined
): Promise<ManageCustomToolResult> {
  if (!toolId) {
    throw new Error('Tool ID is required for editing a custom tool')
  }
  if (!schema && !code) {
    throw new Error('At least one of schema or code must be provided for editing')
  }

  // Get existing tool
  const [existing] = await db
    .select()
    .from(customTools)
    .where(and(eq(customTools.id, toolId), eq(customTools.workspaceId, workspaceId)))
    .limit(1)

  if (!existing) {
    throw new Error(`Tool with ID ${toolId} not found`)
  }

  const mergedSchema = schema ?? (existing.schema as z.infer<typeof CustomToolSchemaZ>)
  const mergedCode = code ?? existing.code

  await db
    .update(customTools)
    .set({
      title: mergedSchema.function.name,
      schema: mergedSchema as any,
      code: mergedCode,
      updatedAt: new Date(),
    })
    .where(eq(customTools.id, toolId))

  const functionName = mergedSchema.function.name
  logger.info(`Updated custom tool: ${functionName}`, { toolId })

  return {
    success: true,
    operation: 'edit',
    toolId,
    functionName,
  }
}

async function deleteCustomTool(
  workspaceId: string,
  toolId: string | undefined
): Promise<ManageCustomToolResult> {
  if (!toolId) {
    throw new Error('Tool ID is required for deleting a custom tool')
  }

  await db
    .delete(customTools)
    .where(and(eq(customTools.id, toolId), eq(customTools.workspaceId, workspaceId)))

  logger.info(`Deleted custom tool: ${toolId}`)

  return {
    success: true,
    operation: 'delete',
    toolId,
  }
}

async function listCustomTools(workspaceId: string): Promise<ManageCustomToolResult> {
  const tools = await db
    .select({
      id: customTools.id,
      title: customTools.title,
      schema: customTools.schema,
    })
    .from(customTools)
    .where(eq(customTools.workspaceId, workspaceId))

  const formattedTools = tools.map((tool) => {
    const schema = tool.schema as {
      function?: { name?: string; description?: string }
    } | null

    return {
      id: tool.id,
      title: tool.title || '',
      functionName: schema?.function?.name || '',
      description: schema?.function?.description || '',
    }
  })

  logger.info('Listed custom tools', { count: formattedTools.length })

  return {
    success: true,
    operation: 'list',
    customTools: formattedTools,
  }
}
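A hedged sketch of the 'add' operation, assuming the '@/lib' alias maps to apps/sim/lib as in the imports above; the workflow ID, user ID, and the example function are placeholders:

import { manageCustomToolServerTool } from '@/lib/copilot/tools/server/workflow/manage-custom-tool'

const added = await manageCustomToolServerTool.execute(
  {
    workflowId: 'wf_123', // placeholder; used only to resolve the workspace
    operation: 'add',
    schema: {
      type: 'function',
      function: {
        name: 'add_numbers',
        description: 'Adds two numbers',
        parameters: {
          type: 'object',
          properties: { a: { type: 'number' }, b: { type: 'number' } },
          required: ['a', 'b'],
        },
      },
    },
    code: 'return a + b', // stored as-is; execution happens elsewhere
  },
  { userId: 'user_abc' } // placeholder
)
console.log(added.toolId, added.functionName)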
apps/sim/lib/copilot/tools/server/workflow/manage-mcp-tool.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
import { db } from '@sim/db'
import { mcpServers, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { z } from 'zod'
import type { BaseServerTool } from '../base-tool'

const logger = createLogger('ManageMcpToolServerTool')

const McpServerConfigZ = z.object({
  name: z.string(),
  transport: z.literal('streamable-http').optional().default('streamable-http'),
  url: z.string().optional(),
  headers: z.record(z.string()).optional(),
  timeout: z.number().optional().default(30000),
  enabled: z.boolean().optional().default(true),
})

export const ManageMcpToolInput = z.object({
  workflowId: z.string().min(1),
  workspaceId: z.string().optional(),
  operation: z.enum(['add', 'edit', 'delete']),
  serverId: z.string().optional(),
  config: McpServerConfigZ.optional(),
})

type ManageMcpToolResult = {
  success: boolean
  operation: string
  serverId?: string
  serverName?: string
}

export const manageMcpToolServerTool: BaseServerTool<
  typeof ManageMcpToolInput,
  ManageMcpToolResult
> = {
  name: 'manage_mcp_tool',

  async execute(args: unknown, context?: { userId: string }) {
    const parsed = ManageMcpToolInput.parse(args)
    const { workflowId, operation, serverId, config } = parsed

    // Get workspace ID from workflow if not provided
    let workspaceId = parsed.workspaceId
    if (!workspaceId) {
      const [wf] = await db
        .select({ workspaceId: workflow.workspaceId })
        .from(workflow)
        .where(eq(workflow.id, workflowId))
        .limit(1)

      if (!wf?.workspaceId) {
        throw new Error('Workflow not found or has no workspace')
      }
      workspaceId = wf.workspaceId
    }

    logger.info('Managing MCP tool', {
      operation,
      serverId,
      serverName: config?.name,
      workspaceId,
    })

    switch (operation) {
      case 'add':
        return await addMcpServer(workspaceId, config, context?.userId)
      case 'edit':
        return await editMcpServer(workspaceId, serverId, config)
      case 'delete':
        return await deleteMcpServer(workspaceId, serverId)
      default:
        throw new Error(`Unknown operation: ${operation}`)
    }
  },
}

async function addMcpServer(
  workspaceId: string,
  config: z.infer<typeof McpServerConfigZ> | undefined,
  userId: string | undefined
): Promise<ManageMcpToolResult> {
  if (!config) {
    throw new Error('Config is required for adding an MCP tool')
  }
  if (!config.name) {
    throw new Error('Server name is required')
  }
  if (!config.url) {
    throw new Error('Server URL is required for streamable-http transport')
  }
  if (!userId) {
    throw new Error('User ID is required for adding an MCP tool')
  }

  const [created] = await db
    .insert(mcpServers)
    .values({
      id: nanoid(),
      workspaceId,
      createdBy: userId,
      name: config.name,
      url: config.url,
      transport: config.transport || 'streamable-http',
      headers: config.headers || {},
      timeout: config.timeout || 30000,
      enabled: config.enabled !== false,
    })
    .returning({ id: mcpServers.id })

  logger.info(`Created MCP server: ${config.name}`, { serverId: created.id })

  return {
    success: true,
    operation: 'add',
    serverId: created.id,
    serverName: config.name,
  }
}

async function editMcpServer(
  workspaceId: string,
  serverId: string | undefined,
  config: z.infer<typeof McpServerConfigZ> | undefined
): Promise<ManageMcpToolResult> {
  if (!serverId) {
    throw new Error('Server ID is required for editing an MCP tool')
  }
  if (!config) {
    throw new Error('Config is required for editing an MCP tool')
  }

  // Verify server exists
  const [existing] = await db
    .select({ id: mcpServers.id, name: mcpServers.name })
    .from(mcpServers)
    .where(and(eq(mcpServers.id, serverId), eq(mcpServers.workspaceId, workspaceId)))
    .limit(1)

  if (!existing) {
    throw new Error(`MCP server with ID ${serverId} not found`)
  }

  const updateData: Record<string, unknown> = {
    updatedAt: new Date(),
  }

  if (config.name) updateData.name = config.name
  if (config.url) updateData.url = config.url
  if (config.transport) updateData.transport = config.transport
  if (config.headers) updateData.headers = config.headers
  if (config.timeout !== undefined) updateData.timeout = config.timeout
  if (config.enabled !== undefined) updateData.enabled = config.enabled

  await db.update(mcpServers).set(updateData).where(eq(mcpServers.id, serverId))

  const serverName = config.name || existing.name
  logger.info(`Updated MCP server: ${serverName}`, { serverId })

  return {
    success: true,
    operation: 'edit',
    serverId,
    serverName,
  }
}

async function deleteMcpServer(
  workspaceId: string,
  serverId: string | undefined
): Promise<ManageMcpToolResult> {
  if (!serverId) {
    throw new Error('Server ID is required for deleting an MCP tool')
  }

  await db
    .delete(mcpServers)
    .where(and(eq(mcpServers.id, serverId), eq(mcpServers.workspaceId, workspaceId)))

  logger.info(`Deleted MCP server: ${serverId}`)

  return {
    success: true,
    operation: 'delete',
    serverId,
  }
}
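A hedged sketch of adding a server via the 'add' operation; the endpoint URL, token, and IDs are placeholders, and the '@/lib' alias is assumed to map to apps/sim/lib:

import { manageMcpToolServerTool } from '@/lib/copilot/tools/server/workflow/manage-mcp-tool'

await manageMcpToolServerTool.execute(
  {
    workflowId: 'wf_123', // placeholder; used only to resolve the workspace
    operation: 'add',
    config: {
      name: 'internal-tools',
      transport: 'streamable-http',
      url: 'https://mcp.example.com/stream', // placeholder endpoint
      headers: { Authorization: 'Bearer <token>' }, // placeholder header
      timeout: 30000,
      enabled: true,
    },
  },
  { userId: 'user_abc' } // placeholder
)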
apps/sim/lib/copilot/tools/server/workflow/redeploy.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { db } from '@sim/db'
import { workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { desc, eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'

const logger = createLogger('RedeployServerTool')

export const RedeployInput = z.object({
  workflowId: z.string(),
})

export const RedeployResult = z.object({
  success: z.boolean(),
  workflowId: z.string(),
  deployedAt: z.string().nullable(),
  message: z.string(),
})

export type RedeployInputType = z.infer<typeof RedeployInput>
export type RedeployResultType = z.infer<typeof RedeployResult>

export const redeployServerTool: BaseServerTool<RedeployInputType, RedeployResultType> = {
  name: 'redeploy',
  async execute(args: unknown, _context?: { userId: string }) {
    const parsed = RedeployInput.parse(args)
    const { workflowId } = parsed

    logger.debug('Redeploying workflow', { workflowId })

    // Get workflow state
    const [wf] = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    // Get current max version
    const [maxVersion] = await db
      .select({ version: workflowDeploymentVersion.version })
      .from(workflowDeploymentVersion)
      .where(eq(workflowDeploymentVersion.workflowId, workflowId))
      .orderBy(desc(workflowDeploymentVersion.version))
      .limit(1)

    const newVersion = (maxVersion?.version || 0) + 1

    // Deactivate all existing versions
    await db
      .update(workflowDeploymentVersion)
      .set({ isActive: false })
      .where(eq(workflowDeploymentVersion.workflowId, workflowId))

    // Create new deployment version
    const deploymentId = crypto.randomUUID()
    const now = new Date()

    // Load workflow state from normalized tables
    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
    const workflowState = {
      blocks: normalizedData?.blocks || {},
      edges: normalizedData?.edges || [],
      loops: normalizedData?.loops || {},
      parallels: normalizedData?.parallels || {},
    }

    await db.insert(workflowDeploymentVersion).values({
      id: deploymentId,
      workflowId,
      version: newVersion,
      state: workflowState,
      isActive: true,
      createdAt: now,
    })

    logger.info('Workflow redeployed', { workflowId, version: newVersion })

    return RedeployResult.parse({
      success: true,
      workflowId,
      deployedAt: now.toISOString(),
      message: `Workflow redeployed (version ${newVersion})`,
    })
  },
}
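A brief usage sketch (IDs are placeholders; the '@/lib' alias is assumed):

import { redeployServerTool } from '@/lib/copilot/tools/server/workflow/redeploy'

const res = await redeployServerTool.execute({ workflowId: 'wf_123' }, { userId: 'user_abc' }) // placeholders
console.log(res.deployedAt, res.message) // e.g. "Workflow redeployed (version 3)"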
apps/sim/lib/copilot/tools/server/workflow/run-workflow.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
import { db } from '@sim/db'
import { workflow, workflowDeploymentVersion } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'

const logger = createLogger('RunWorkflowServerTool')

export const RunWorkflowInput = z.object({
  workflowId: z.string().min(1),
  workflow_input: z.record(z.any()).optional(),
})

export const RunWorkflowResult = z.object({
  success: z.boolean(),
  executionId: z.string().nullable(),
  executionStartTime: z.string().nullable(),
  output: z.any().nullable(),
  message: z.string(),
  error: z.string().optional(),
})

export type RunWorkflowInputType = z.infer<typeof RunWorkflowInput>
export type RunWorkflowResultType = z.infer<typeof RunWorkflowResult>

export const runWorkflowServerTool: BaseServerTool<RunWorkflowInputType, RunWorkflowResultType> = {
  name: 'run_workflow',
  async execute(args: unknown, context?: { userId: string }) {
    const parsed = RunWorkflowInput.parse(args)
    const { workflowId, workflow_input } = parsed

    if (!context?.userId) {
      throw new Error('User authentication required')
    }

    logger.debug('Running workflow', { workflowId, hasInput: !!workflow_input })

    // Get workflow info
    const [wf] = await db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    // Check if workflow is deployed
    const [deployment] = await db
      .select({ id: workflowDeploymentVersion.id })
      .from(workflowDeploymentVersion)
      .where(
        and(
          eq(workflowDeploymentVersion.workflowId, workflowId),
          eq(workflowDeploymentVersion.isActive, true)
        )
      )
      .limit(1)

    const executionId = crypto.randomUUID()
    const executionStartTime = new Date().toISOString()

    // If workflow is deployed, we can use the execute API
    // Otherwise we need to execute directly
    const appUrl = process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'

    try {
      // Call the internal execution endpoint
      // Note: For server-side execution without a browser, we call the API directly
      const executeUrl = `${appUrl}/api/workflows/${workflowId}/execute`

      const response = await fetch(executeUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          // Use internal auth header
          'X-Internal-Auth': context.userId,
        },
        body: JSON.stringify({
          input: workflow_input || {},
          executionId,
        }),
      })

      if (!response.ok) {
        const errorText = await response.text().catch(() => '')
        throw new Error(errorText || `Execution failed with status ${response.status}`)
      }

      const result = await response.json()

      // Determine success from result
      const succeeded = result.success !== false
      const output = result.output || result.result || null

      if (succeeded) {
        logger.info('Workflow execution completed', { workflowId, executionId })

        return RunWorkflowResult.parse({
          success: true,
          executionId,
          executionStartTime,
          output,
          message: `Workflow execution completed. Started at: ${executionStartTime}`,
        })
      }
      const errorMessage = result.error || 'Workflow execution failed'
      logger.error('Workflow execution failed', { workflowId, error: errorMessage })

      return RunWorkflowResult.parse({
        success: false,
        executionId,
        executionStartTime,
        output: null,
        message: errorMessage,
        error: errorMessage,
      })
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error)
      logger.error('Workflow execution error', { workflowId, error: errorMessage })

      return RunWorkflowResult.parse({
        success: false,
        executionId,
        executionStartTime,
        output: null,
        message: errorMessage,
        error: errorMessage,
      })
    }
  },
}
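A brief usage sketch (IDs and the input key are placeholders; the '@/lib' alias is assumed). Note the tool proxies execution through the app's /api/workflows/{id}/execute route rather than running the workflow in-process:

import { runWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/run-workflow'

const run = await runWorkflowServerTool.execute(
  { workflowId: 'wf_123', workflow_input: { customerEmail: 'a@example.com' } }, // placeholders
  { userId: 'user_abc' }                                                        // placeholder
)
if (!run.success) console.error(run.error)
else console.log(run.executionId, run.output)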
@@ -0,0 +1,161 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'

const logger = createLogger('SetGlobalWorkflowVariablesServerTool')

const OperationItemSchema = z.object({
  operation: z.enum(['add', 'edit', 'delete']),
  name: z.string(),
  type: z.enum(['plain', 'number', 'boolean', 'array', 'object']).optional(),
  value: z.string().optional(),
})

export const SetGlobalWorkflowVariablesInput = z.object({
  workflowId: z.string(),
  operations: z.array(OperationItemSchema),
})

export const SetGlobalWorkflowVariablesResult = z.object({
  success: z.boolean(),
  message: z.string(),
  variables: z.record(z.unknown()),
})

export type SetGlobalWorkflowVariablesInputType = z.infer<typeof SetGlobalWorkflowVariablesInput>
export type SetGlobalWorkflowVariablesResultType = z.infer<typeof SetGlobalWorkflowVariablesResult>

function coerceValue(
  value: string | undefined,
  type?: 'plain' | 'number' | 'boolean' | 'array' | 'object'
): unknown {
  if (value === undefined) return value
  const t = type || 'plain'
  try {
    if (t === 'number') {
      const n = Number(value)
      if (Number.isNaN(n)) return value
      return n
    }
    if (t === 'boolean') {
      const v = String(value).trim().toLowerCase()
      if (v === 'true') return true
      if (v === 'false') return false
      return value
    }
    if (t === 'array' || t === 'object') {
      const parsed = JSON.parse(value)
      if (t === 'array' && Array.isArray(parsed)) return parsed
      if (t === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed))
        return parsed
      return value
    }
  } catch {
    // Fall through to return value as-is
  }
  return value
}

export const setGlobalWorkflowVariablesServerTool: BaseServerTool<
  SetGlobalWorkflowVariablesInputType,
  SetGlobalWorkflowVariablesResultType
> = {
  name: 'set_global_workflow_variables',
  async execute(args: unknown, _context?: { userId: string }) {
    const parsed = SetGlobalWorkflowVariablesInput.parse(args)
    const { workflowId, operations } = parsed

    logger.debug('Setting workflow variables', { workflowId, operationCount: operations.length })

    // Get current workflow variables
    const [wf] = await db
      .select({ variables: workflow.variables })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)

    if (!wf) {
      throw new Error(`Workflow not found: ${workflowId}`)
    }

    const currentVarsRecord = (wf.variables as Record<string, unknown>) || {}

    // Build mutable map by variable name
    const byName: Record<string, Record<string, unknown>> = {}
    Object.values(currentVarsRecord).forEach((v: unknown) => {
      if (v && typeof v === 'object' && 'id' in v && 'name' in v) {
        const variable = v as Record<string, unknown>
        byName[String(variable.name)] = variable
      }
    })

    // Apply operations in order
    for (const op of operations) {
      const key = String(op.name)
      const nextType = op.type || (byName[key]?.type as string) || 'plain'

      if (op.operation === 'delete') {
        delete byName[key]
        continue
      }

      const typedValue = coerceValue(
        op.value,
        nextType as 'plain' | 'number' | 'boolean' | 'array' | 'object'
      )

      if (op.operation === 'add') {
        byName[key] = {
          id: crypto.randomUUID(),
          workflowId,
          name: key,
          type: nextType,
          value: typedValue,
        }
        continue
      }

      if (op.operation === 'edit') {
        if (!byName[key]) {
          // If editing a non-existent variable, create it
          byName[key] = {
            id: crypto.randomUUID(),
            workflowId,
            name: key,
            type: nextType,
            value: typedValue,
          }
        } else {
          byName[key] = {
            ...byName[key],
            type: nextType,
            ...(op.value !== undefined ? { value: typedValue } : {}),
          }
        }
      }
    }

    // Convert byName (keyed by name) to record keyed by ID for storage
    const variablesRecord: Record<string, unknown> = {}
    for (const v of Object.values(byName)) {
      variablesRecord[v.id as string] = v
    }

    // Update workflow variables
    await db.update(workflow).set({ variables: variablesRecord }).where(eq(workflow.id, workflowId))

    logger.info('Updated workflow variables', {
      workflowId,
      variableCount: Object.keys(byName).length,
    })

    return SetGlobalWorkflowVariablesResult.parse({
      success: true,
      message: 'Workflow variables updated',
      variables: byName,
    })
  },
}
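A sketch of the operations payload (IDs are placeholders); values are always passed as strings and coerced by coerceValue according to the declared type:

// (import of setGlobalWorkflowVariablesServerTool omitted; its module path is not shown in this excerpt)
await setGlobalWorkflowVariablesServerTool.execute(
  {
    workflowId: 'wf_123', // placeholder
    operations: [
      { operation: 'add', name: 'retryLimit', type: 'number', value: '3' },
      { operation: 'edit', name: 'featureFlags', type: 'object', value: '{"beta":true}' },
      { operation: 'delete', name: 'obsoleteVar' },
    ],
  },
  { userId: 'user_abc' } // placeholder; unused by this tool
)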
@@ -1,5 +1,9 @@
 import { z } from 'zod'
+
+// ============================================================================
+// Common Schemas (shared across multiple tools and API routes)
+// ============================================================================
 
 // Generic envelope used by client to validate API responses
 export const ExecuteResponseSuccessSchema = z.object({
   success: z.literal(true),
@@ -7,6 +11,85 @@ export const ExecuteResponseSuccessSchema = z.object({
 })
 export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>
+
+/**
+ * Standard tool error structure.
+ * Used in ToolResult and tool_result events.
+ */
+export const ToolErrorSchema = z.object({
+  code: z.string(),
+  message: z.string(),
+  details: z.record(z.unknown()).optional(),
+})
+export type ToolError = z.infer<typeof ToolErrorSchema>
+
+/**
+ * Standard tool result structure.
+ * This is the canonical format for tool execution results across the system.
+ */
+export const ToolResultSchema = z.object({
+  success: z.boolean(),
+  data: z.unknown().optional(),
+  error: ToolErrorSchema.optional(),
+})
+export type ToolResultType = z.infer<typeof ToolResultSchema>
+
+/**
+ * Mark-complete payload schema.
+ * Used when calling the Go copilot's mark-complete endpoint.
+ */
+export const MarkCompletePayloadSchema = z.object({
+  /** Tool call ID */
+  id: z.string(),
+  /** Tool name */
+  name: z.string(),
+  /** HTTP-like status code (200 = success, 4xx = client error, 5xx = server error) */
+  status: z.number().int().min(100).max(599),
+  /** Optional message (typically error message or success description) */
+  message: z.unknown().optional(),
+  /** Optional data payload (tool result data) */
+  data: z.unknown().optional(),
+})
+export type MarkCompletePayload = z.infer<typeof MarkCompletePayloadSchema>
+
+/**
+ * Tool result event from Go (received via SSE stream).
+ * This represents what we receive back after mark-complete.
+ */
+export const ToolResultEventSchema = z.object({
+  toolCallId: z.string().optional(),
+  data: z
+    .object({
+      id: z.string().optional(),
+    })
+    .optional(),
+  success: z.boolean().optional(),
+  failedDependency: z.boolean().optional(),
+  result: z
+    .object({
+      skipped: z.boolean().optional(),
+    })
+    .passthrough()
+    .optional(),
+})
+export type ToolResultEvent = z.infer<typeof ToolResultEventSchema>
+
+/**
+ * Helper to extract toolCallId from tool_result event data.
+ * Handles the various formats Go might send.
+ */
+export function extractToolCallId(data: unknown): string | undefined {
+  if (!data || typeof data !== 'object') return undefined
+  const d = data as Record<string, unknown>
+  // Try direct toolCallId first
+  if (typeof d.toolCallId === 'string') return d.toolCallId
+  // Then try nested data.id
+  if (d.data && typeof d.data === 'object') {
+    const nested = d.data as Record<string, unknown>
+    if (typeof nested.id === 'string') return nested.id
+  }
+  return undefined
+}
 
 // get_blocks_and_tools
 export const GetBlocksAndToolsInput = z.object({})
 export const GetBlocksAndToolsResult = z.object({
@@ -176,3 +259,206 @@ export const GetBlockUpstreamReferencesResult = z.object({
 })
 export type GetBlockUpstreamReferencesInputType = z.infer<typeof GetBlockUpstreamReferencesInput>
 export type GetBlockUpstreamReferencesResultType = z.infer<typeof GetBlockUpstreamReferencesResult>
+
+// ============================================================================
+// Search Tools
+// ============================================================================
+
+// search_documentation
+export const SearchDocumentationInput = z.object({
+  query: z.string().min(1),
+  topK: z.number().min(1).max(50).optional().default(10),
+  threshold: z.number().min(0).max(1).optional(),
+})
+export const SearchDocumentationResult = z.object({
+  results: z.array(
+    z.object({
+      id: z.number(),
+      title: z.string(),
+      url: z.string(),
+      content: z.string(),
+      similarity: z.number(),
+    })
+  ),
+  query: z.string(),
+  totalResults: z.number(),
+})
+export type SearchDocumentationInputType = z.infer<typeof SearchDocumentationInput>
+export type SearchDocumentationResultType = z.infer<typeof SearchDocumentationResult>
+
+// search_online
+export const SearchOnlineInput = z.object({
+  query: z.string().min(1),
+  num: z.number().min(1).max(100).optional().default(10),
+  type: z.string().optional().default('search'),
+  gl: z.string().optional(),
+  hl: z.string().optional(),
+})
+export const SearchOnlineResult = z.object({
+  results: z.array(z.record(z.unknown())),
+  query: z.string(),
+  type: z.string(),
+  totalResults: z.number(),
+  source: z.enum(['exa', 'serper']),
+})
+export type SearchOnlineInputType = z.infer<typeof SearchOnlineInput>
+export type SearchOnlineResultType = z.infer<typeof SearchOnlineResult>
+
+// make_api_request
+export const MakeApiRequestInput = z.object({
+  url: z.string().url(),
+  method: z.enum(['GET', 'POST', 'PUT']),
+  queryParams: z.record(z.union([z.string(), z.number(), z.boolean()])).optional(),
+  headers: z.record(z.string()).optional(),
+  body: z.unknown().optional(),
+})
+export const MakeApiRequestResult = z.object({
+  data: z.unknown(),
+  status: z.number(),
+  headers: z.record(z.unknown()).optional(),
+  truncated: z.boolean().optional(),
+  totalChars: z.number().optional(),
+  previewChars: z.number().optional(),
+  note: z.string().optional(),
+})
+export type MakeApiRequestInputType = z.infer<typeof MakeApiRequestInput>
+export type MakeApiRequestResultType = z.infer<typeof MakeApiRequestResult>
+
+// ============================================================================
+// Workflow Tools
+// ============================================================================
+
+// edit_workflow - input is complex, using passthrough for flexibility
+// workflowId is optional - if not provided, uses the active workflow from context
+export const EditWorkflowInput = z.object({
+  workflowId: z.string().optional(),
+  operations: z.array(z.record(z.unknown())),
+  currentUserWorkflow: z.unknown().optional(),
+})
+export const EditWorkflowResult = z.object({
+  success: z.boolean(),
+  workflowState: z.unknown(),
+  inputValidationErrors: z.array(z.string()).optional(),
+  inputValidationMessage: z.string().optional(),
+  skippedItems: z.array(z.string()).optional(),
+  skippedItemsMessage: z.string().optional(),
+})
+export type EditWorkflowInputType = z.infer<typeof EditWorkflowInput>
+export type EditWorkflowResultType = z.infer<typeof EditWorkflowResult>
+
+// get_workflow_console
+export const GetWorkflowConsoleInput = z.object({
+  workflowId: z.string(),
+  limit: z.number().min(1).max(50).optional().default(2),
+  includeDetails: z.boolean().optional().default(false),
+})
+export const GetWorkflowConsoleResult = z.array(
+  z.object({
+    executionId: z.string(),
+    startedAt: z.string(),
+    blocks: z.array(
+      z.object({
+        id: z.string(),
+        name: z.string(),
+        startedAt: z.string(),
+        endedAt: z.string(),
+        durationMs: z.number(),
+        output: z.unknown(),
+        error: z.string().optional(),
+      })
+    ),
+  })
+)
+export type GetWorkflowConsoleInputType = z.infer<typeof GetWorkflowConsoleInput>
+export type GetWorkflowConsoleResultType = z.infer<typeof GetWorkflowConsoleResult>
+
+// list_user_workflows
+export const ListUserWorkflowsInput = z.object({})
+export const ListUserWorkflowsResult = z.object({
+  workflow_names: z.array(z.string()),
+  count: z.number(),
+})
+export type ListUserWorkflowsInputType = z.infer<typeof ListUserWorkflowsInput>
+export type ListUserWorkflowsResultType = z.infer<typeof ListUserWorkflowsResult>
+
+// ============================================================================
+// User Tools
+// ============================================================================
+
+// get_credentials
+export const GetCredentialsInput = z.object({
+  workflowId: z.string().optional(),
+})
+export const GetCredentialsResult = z.object({
+  oauth: z.object({
+    connected: z.object({
+      credentials: z.array(
+        z.object({
+          id: z.string(),
+          name: z.string(),
+          provider: z.string(),
+          serviceName: z.string(),
+          lastUsed: z.string(),
+          isDefault: z.boolean(),
+          accessToken: z.string().nullable(),
+        })
+      ),
+      total: z.number(),
+    }),
+    notConnected: z.object({
+      services: z.array(
+        z.object({
+          providerId: z.string(),
+          name: z.string(),
+          description: z.string().optional(),
+          baseProvider: z.string().optional(),
+        })
+      ),
+      total: z.number(),
+    }),
+  }),
+  environment: z.object({
+    variableNames: z.array(z.string()),
+    count: z.number(),
+    personalVariables: z.array(z.string()),
+    workspaceVariables: z.array(z.string()),
+    conflicts: z.array(z.string()).optional(),
+  }),
+})
+export type GetCredentialsInputType = z.infer<typeof GetCredentialsInput>
+export type GetCredentialsResultType = z.infer<typeof GetCredentialsResult>
+
+// set_environment_variables
+export const SetEnvironmentVariablesInput = z.object({
+  variables: z.array(
+    z.object({
+      key: z.string(),
+      value: z.string(),
+    })
+  ),
+  workspaceId: z.string().optional(),
+})
+export const SetEnvironmentVariablesResult = z.object({
+  success: z.boolean(),
+  message: z.string(),
+  savedCount: z.number().optional(),
+  variables: z.array(z.string()).optional(),
+})
+
+// set_context - for headless mode to dynamically set the workflow context
+export const SetContextInput = z.object({
+  /** The workflow ID to set as the current context */
+  workflowId: z.string().min(1, 'workflowId is required'),
+})
+export const SetContextResult = z.object({
+  success: z.boolean(),
+  /** The resolved execution context - Go should store this and include in tool_call events */
+  executionContext: z.object({
+    workflowId: z.string(),
+    workspaceId: z.string().optional(),
+    userId: z.string(),
+  }),
+  message: z.string(),
+})
+export type SetContextInputType = z.infer<typeof SetContextInput>
+export type SetContextResultType = z.infer<typeof SetContextResult>
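To make the event plumbing concrete, a small illustrative sketch (the raw event object is invented) showing how a consumer might validate a tool_result event and recover its tool call ID with the helpers above:

const raw = { data: { id: 'call_42' }, success: true, result: { skipped: false } } // invented event
const evt = ToolResultEventSchema.parse(raw) // validates the shape
const toolCallId = extractToolCallId(raw)    // 'call_42', via the nested data.id fallback
console.log(evt.success, toolCallId)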
@@ -5,6 +5,11 @@ import { create } from 'zustand'
|
|||||||
import { devtools } from 'zustand/middleware'
|
import { devtools } from 'zustand/middleware'
|
||||||
import { type CopilotChat, sendStreamingMessage } from '@/lib/copilot/api'
|
import { type CopilotChat, sendStreamingMessage } from '@/lib/copilot/api'
|
||||||
import type { CopilotTransportMode } from '@/lib/copilot/models'
|
import type { CopilotTransportMode } from '@/lib/copilot/models'
|
||||||
|
import {
|
||||||
|
isServerExecutedToolSync,
|
||||||
|
markToolCallServerHandled,
|
||||||
|
prefetchServerExecutedTools,
|
||||||
|
} from '@/lib/copilot/server-executed-tools'
|
||||||
import type {
|
import type {
|
||||||
BaseClientToolMetadata,
|
BaseClientToolMetadata,
|
||||||
ClientToolDisplay,
|
ClientToolDisplay,
|
||||||
@@ -75,6 +80,7 @@ import { ManageMcpToolClientTool } from '@/lib/copilot/tools/client/workflow/man
|
|||||||
import { RedeployClientTool } from '@/lib/copilot/tools/client/workflow/redeploy'
|
import { RedeployClientTool } from '@/lib/copilot/tools/client/workflow/redeploy'
|
||||||
import { RunWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/run-workflow'
|
import { RunWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/run-workflow'
|
||||||
import { SetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/set-global-workflow-variables'
|
import { SetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/set-global-workflow-variables'
|
||||||
|
import { extractToolCallId } from '@/lib/copilot/tools/shared/schemas'
|
||||||
import { getQueryClient } from '@/app/_shell/providers/query-provider'
|
import { getQueryClient } from '@/app/_shell/providers/query-provider'
|
||||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||||
import type {
|
import type {
|
||||||
@@ -347,6 +353,17 @@ function isBackgroundState(state: any): boolean {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if a tool call state is aborted
|
||||||
|
*/
|
||||||
|
function isAbortedState(state: any): boolean {
|
||||||
|
try {
|
||||||
|
return state === 'aborted' || state === (ClientToolCallState as any).aborted
|
||||||
|
} catch {
|
||||||
|
return state === 'aborted'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Checks if a tool call state is terminal (success, error, rejected, aborted, review, or background)
|
* Checks if a tool call state is terminal (success, error, rejected, aborted, review, or background)
|
||||||
*/
|
*/
|
||||||
@@ -1138,8 +1155,8 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
},
|
},
|
||||||
tool_result: (data, context, get, set) => {
|
tool_result: (data, context, get, set) => {
|
||||||
try {
|
try {
|
||||||
const toolCallId: string | undefined = data?.toolCallId || data?.data?.id
|
const toolCallId = extractToolCallId(data)
|
||||||
const success: boolean | undefined = data?.success
|
const success: boolean | undefined = (data as Record<string, unknown>)?.success as boolean
|
||||||
const failedDependency: boolean = data?.failedDependency === true
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
const skipped: boolean = data?.result?.skipped === true
|
const skipped: boolean = data?.result?.skipped === true
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
@@ -1149,9 +1166,10 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
if (
|
if (
|
||||||
isRejectedState(current.state) ||
|
isRejectedState(current.state) ||
|
||||||
isReviewState(current.state) ||
|
isReviewState(current.state) ||
|
||||||
isBackgroundState(current.state)
|
isBackgroundState(current.state) ||
|
||||||
|
isAbortedState(current.state)
|
||||||
) {
|
) {
|
||||||
// Preserve terminal review/rejected state; do not override
|
// Preserve terminal review/rejected/aborted state; do not override
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
const targetState = success
|
const targetState = success
|
||||||
@@ -1207,7 +1225,8 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
if (
|
if (
|
||||||
isRejectedState(b.toolCall?.state) ||
|
isRejectedState(b.toolCall?.state) ||
|
||||||
isReviewState(b.toolCall?.state) ||
|
isReviewState(b.toolCall?.state) ||
|
||||||
isBackgroundState(b.toolCall?.state)
|
isBackgroundState(b.toolCall?.state) ||
|
||||||
|
isAbortedState(b.toolCall?.state)
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
const targetState = success
|
const targetState = success
|
||||||
@@ -1236,8 +1255,8 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
},
|
},
|
||||||
tool_error: (data, context, get, set) => {
|
tool_error: (data, context, get, set) => {
|
||||||
try {
|
try {
|
||||||
const toolCallId: string | undefined = data?.toolCallId || data?.data?.id
|
const toolCallId = extractToolCallId(data)
|
||||||
const failedDependency: boolean = data?.failedDependency === true
|
const failedDependency: boolean = (data as Record<string, unknown>)?.failedDependency === true
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
const current = toolCallsById[toolCallId]
|
const current = toolCallsById[toolCallId]
|
||||||
@@ -1245,7 +1264,8 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
if (
|
if (
|
||||||
isRejectedState(current.state) ||
|
isRejectedState(current.state) ||
|
||||||
isReviewState(current.state) ||
|
isReviewState(current.state) ||
|
||||||
isBackgroundState(current.state)
|
isBackgroundState(current.state) ||
|
||||||
|
isAbortedState(current.state)
|
||||||
) {
|
) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -1271,7 +1291,8 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
if (
|
if (
|
||||||
isRejectedState(b.toolCall?.state) ||
|
isRejectedState(b.toolCall?.state) ||
|
||||||
isReviewState(b.toolCall?.state) ||
|
isReviewState(b.toolCall?.state) ||
|
||||||
isBackgroundState(b.toolCall?.state)
|
isBackgroundState(b.toolCall?.state) ||
|
||||||
|
isAbortedState(b.toolCall?.state)
|
||||||
)
|
)
|
||||||
break
|
break
|
||||||
const targetState = failedDependency
|
const targetState = failedDependency
|
||||||
@@ -1362,6 +1383,28 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if this tool is executed server-side
|
||||||
|
// If so, skip client execution - the server will handle it and send tool_result
|
||||||
|
if (name && isServerExecutedToolSync(name)) {
|
||||||
|
markToolCallServerHandled(id, name)
|
||||||
|
logger.info('[toolCallsById] Tool is server-executed, skipping client execution', {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
})
|
||||||
|
// Update state to executing to show progress in UI
|
||||||
|
const executingMap = { ...get().toolCallsById }
|
||||||
|
executingMap[id] = {
|
||||||
|
...executingMap[id],
|
||||||
|
state: ClientToolCallState.executing,
|
||||||
|
display: resolveToolDisplay(name, ClientToolCallState.executing, id, args),
|
||||||
|
}
|
||||||
|
set({ toolCallsById: executingMap })
|
||||||
|
// Update inline content block
|
||||||
|
upsertToolCallBlock(context, executingMap[id])
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// Prefer interface-based registry to determine interrupt and execute
|
// Prefer interface-based registry to determine interrupt and execute
|
||||||
try {
|
try {
|
||||||
const def = name ? getTool(name) : undefined
|
const def = name ? getTool(name) : undefined
|
||||||
@@ -1419,11 +1462,12 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
? result.status >= 200 && result.status < 300
|
? result.status >= 200 && result.status < 300
|
||||||
: true
|
: true
|
||||||
const completeMap = { ...get().toolCallsById }
|
const completeMap = { ...get().toolCallsById }
|
||||||
// Do not override terminal review/rejected
|
// Do not override terminal review/rejected/aborted
|
||||||
if (
|
if (
|
||||||
isRejectedState(completeMap[id]?.state) ||
|
isRejectedState(completeMap[id]?.state) ||
|
||||||
isReviewState(completeMap[id]?.state) ||
|
isReviewState(completeMap[id]?.state) ||
|
||||||
isBackgroundState(completeMap[id]?.state)
|
isBackgroundState(completeMap[id]?.state) ||
|
||||||
|
isAbortedState(completeMap[id]?.state)
|
||||||
) {
|
) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -1461,11 +1505,12 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
})
|
})
|
||||||
.catch((e) => {
|
.catch((e) => {
|
||||||
const errorMap = { ...get().toolCallsById }
|
const errorMap = { ...get().toolCallsById }
|
||||||
// Do not override terminal review/rejected
|
// Do not override terminal review/rejected/aborted
|
||||||
if (
|
if (
|
||||||
isRejectedState(errorMap[id]?.state) ||
|
isRejectedState(errorMap[id]?.state) ||
|
||||||
isReviewState(errorMap[id]?.state) ||
|
isReviewState(errorMap[id]?.state) ||
|
||||||
isBackgroundState(errorMap[id]?.state)
|
isBackgroundState(errorMap[id]?.state) ||
|
||||||
|
isAbortedState(errorMap[id]?.state)
|
||||||
) {
|
) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -1530,11 +1575,12 @@ const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
})
|
})
|
||||||
.catch(() => {
|
.catch(() => {
|
||||||
const errorMap = { ...get().toolCallsById }
|
const errorMap = { ...get().toolCallsById }
|
||||||
// Do not override terminal review/rejected
|
// Do not override terminal review/rejected/aborted
|
||||||
if (
|
if (
|
||||||
isRejectedState(errorMap[id]?.state) ||
|
isRejectedState(errorMap[id]?.state) ||
|
||||||
isReviewState(errorMap[id]?.state) ||
|
isReviewState(errorMap[id]?.state) ||
|
||||||
isBackgroundState(errorMap[id]?.state)
|
isBackgroundState(errorMap[id]?.state) ||
|
||||||
|
isAbortedState(errorMap[id]?.state)
|
||||||
) {
|
) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -2157,8 +2203,8 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
const parentToolCallId = context.subAgentParentToolCallId
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
if (!parentToolCallId) return
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
const toolCallId: string | undefined = data?.toolCallId || data?.data?.id
|
const toolCallId = extractToolCallId(data)
|
||||||
const success: boolean | undefined = data?.success !== false // Default to true if not specified
|
const success: boolean | undefined = (data as Record<string, unknown>)?.success !== false // Default to true if not specified
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
|
|
||||||
// Initialize if needed
|
// Initialize if needed
|
||||||
@@ -2173,6 +2219,12 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
|
|
||||||
|
// Preserve aborted state - don't override if user aborted
|
||||||
|
if (isAbortedState(existing.state)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
const updatedSubAgentToolCall = {
|
const updatedSubAgentToolCall = {
|
||||||
...existing,
|
...existing,
|
||||||
state: targetState,
|
state: targetState,
|
||||||
@@ -2191,6 +2243,10 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
     // Update the individual tool call in toolCallsById so ToolCall component gets latest state
     const { toolCallsById } = get()
     if (toolCallsById[toolCallId]) {
+      // Also check toolCallsById state in case it was aborted there
+      if (isAbortedState(toolCallsById[toolCallId].state)) {
+        return
+      }
       const updatedMap = {
         ...toolCallsById,
         [toolCallId]: updatedSubAgentToolCall,
@@ -2385,6 +2441,9 @@ export const useCopilotStore = create<CopilotStore>()(
       const { isSendingMessage } = get()
       if (isSendingMessage) get().abortMessage()
 
+      // Prefetch server-executed tools list (for skipping client execution)
+      prefetchServerExecutedTools()
+
       // Abort all in-progress tools and clear any diff preview
       abortAllInProgressTools(set, get)
       try {
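
What prefetchServerExecutedTools does is not visible in this hunk beyond its comment. One plausible reading, sketched with a hypothetical route and module-level cache (neither the endpoint path nor the response shape is confirmed by this diff): fetch the names of tools the server executes itself, so the client can skip local execution when those tool calls stream in.

// Hypothetical cache and endpoint; both are assumptions for illustration only.
let serverExecutedTools: Set<string> | null = null

async function prefetchServerExecutedTools(): Promise<void> {
  try {
    // Hypothetical route; the real list may come from elsewhere in the app.
    const res = await fetch('/api/copilot/server-executed-tools')
    if (!res.ok) return
    const body = (await res.json()) as { tools?: string[] }
    serverExecutedTools = new Set(body.tools ?? [])
  } catch {
    // Best-effort: a failed prefetch just means the client falls back to its
    // normal per-tool execution checks.
  }
}

Calling it fire-and-forget at send time, as the hunk does, keeps the prefetch off the critical path of the user's message.
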
@@ -3063,9 +3122,9 @@ export const useCopilotStore = create<CopilotStore>()(
       const map = { ...get().toolCallsById }
       const current = map[id]
       if (!current) return
-      // Preserve rejected state from being overridden
+      // Preserve rejected/aborted state from being overridden with success
       if (
-        isRejectedState(current.state) &&
+        (isRejectedState(current.state) || isAbortedState(current.state)) &&
         (newState === 'success' || newState === (ClientToolCallState as any).success)
       ) {
         return
@@ -3143,8 +3202,11 @@ export const useCopilotStore = create<CopilotStore>()(
       if (!id) return
       const current = toolCallsById[id]
       if (!current) return
-      // Do not override a rejected tool with success
-      if (isRejectedState(current.state) && targetState === (ClientToolCallState as any).success) {
+      // Do not override a rejected or aborted tool with success
+      if (
+        (isRejectedState(current.state) || isAbortedState(current.state)) &&
+        targetState === (ClientToolCallState as any).success
+      ) {
         return
       }
 
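
The two hunks above apply the same rule in two different setters: a tool call the user has rejected or aborted must not be flipped to success by a late event. Stated as a standalone sketch with a stand-in state type, not the store's real API:

// Stand-in state type; the store's ClientToolCallState has more members.
type ToolState = 'pending' | 'executing' | 'success' | 'error' | 'rejected' | 'aborted'

// Refuse to let a late `success` overwrite a user-decided terminal state.
function resolveNextState(current: ToolState, incoming: ToolState): ToolState {
  const userDecided = current === 'rejected' || current === 'aborted'
  return userDecided && incoming === 'success' ? current : incoming
}

// e.g. resolveNextState('aborted', 'success') === 'aborted'
//      resolveNextState('aborted', 'error') === 'error'

Note that these two setters only block the success transition, whereas the catch-path guards earlier in the diff ignore any update once a tool call is aborted.
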
@@ -3862,11 +3924,12 @@ export const useCopilotStore = create<CopilotStore>()(
       const success = result.success && result.result?.success
       const completeMap = { ...get().toolCallsById }
 
-      // Do not override terminal review/rejected
+      // Do not override terminal review/rejected/aborted
       if (
         isRejectedState(completeMap[id]?.state) ||
         isReviewState(completeMap[id]?.state) ||
-        isBackgroundState(completeMap[id]?.state)
+        isBackgroundState(completeMap[id]?.state) ||
+        isAbortedState(completeMap[id]?.state)
       ) {
         return
       }
@@ -3911,11 +3974,12 @@ export const useCopilotStore = create<CopilotStore>()(
       } catch {}
     } catch (e) {
       const errorMap = { ...get().toolCallsById }
-      // Do not override terminal review/rejected
+      // Do not override terminal review/rejected/aborted
       if (
         isRejectedState(errorMap[id]?.state) ||
         isReviewState(errorMap[id]?.state) ||
-        isBackgroundState(errorMap[id]?.state)
+        isBackgroundState(errorMap[id]?.state) ||
+        isAbortedState(errorMap[id]?.state)
       ) {
         return
       }